ansible-playbook [core 2.17.12] config file = None configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules'] ansible python module location = /usr/local/lib/python3.12/site-packages/ansible ansible collection location = /tmp/collections-ctS executable location = /usr/local/bin/ansible-playbook python version = 3.12.11 (main, Jun 4 2025, 00:00:00) [GCC 14.2.1 20250110 (Red Hat 14.2.1-8)] (/usr/bin/python3.12) jinja version = 3.1.6 libyaml = True No config file found; using defaults running playbook inside collection fedora.linux_system_roles Skipping callback 'debug', as we already have a stdout callback. Skipping callback 'json', as we already have a stdout callback. Skipping callback 'jsonl', as we already have a stdout callback. Skipping callback 'default', as we already have a stdout callback. Skipping callback 'minimal', as we already have a stdout callback. Skipping callback 'oneline', as we already have a stdout callback. PLAYBOOK: tests_quadlet_basic.yml ********************************************** 2 plays in /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml PLAY [all] ********************************************************************* TASK [Include vault variables] ************************************************* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:5 Tuesday 08 July 2025 08:52:13 -0400 (0:00:00.017) 0:00:00.017 ********** ok: [managed-node1] => { "ansible_facts": { "__podman_test_password": { "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n35383939616163653333633431363463313831383037386236646138333162396161356130303461\n3932623930643263313563336163316337643562333936360a363538636631313039343233383732\n38666530383538656639363465313230343533386130303833336434303438333161656262346562\n3362626538613031640a663330613638366132356534363534353239616666653466353961323533\n6565\n" }, "mysql_container_root_password": { "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n61333932373230333539663035366431326163363166363036323963623131363530326231303634\n6635326161643165363366323062333334363730376631660a393566366139353861656364656661\n38653463363837336639363032646433666361646535366137303464623261313663643336306465\n6264663730656337310a343962353137386238383064646533366433333437303566656433386233\n34343235326665646661623131643335313236313131353661386338343366316261643634653633\n3832313034366536616531323963333234326461353130303532\n" } }, "ansible_included_var_files": [ "/tmp/podman-JFj/tests/vars/vault-variables.yml" ], "changed": false } PLAY [Ensure that the role can manage quadlet specs] *************************** TASK [Gathering Facts] ********************************************************* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:9 Tuesday 08 July 2025 08:52:13 -0400 (0:00:00.054) 0:00:00.072 ********** [WARNING]: Platform linux on host managed-node1 is using the discovered Python interpreter at /usr/bin/python3.12, but future installation of another Python interpreter could change the meaning of that path. See https://docs.ansible.com/ansible- core/2.17/reference_appendices/interpreter_discovery.html for more information. 
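The "Include vault variables" task above loads two vault-encrypted test secrets (__podman_test_password and mysql_container_root_password) from vars/vault-variables.yml. The plaintext layout of that file is not shown in this log; as a rough sketch only, a vars file consumed this way would look something like the following, with each value normally produced by `ansible-vault encrypt_string --name <var> '<secret>'` (the ciphertext below is a zero-filled placeholder, not the encrypted data from this run):

# vars/vault-variables.yml -- illustrative layout only, not the file used by this test run
__podman_test_password: !vault |
  $ANSIBLE_VAULT;1.1;AES256
  30303030303030303030303030303030303030303030303030303030303030303030303030303030
  3030303030303030303030303030303030303030303030303030303030303030
mysql_container_root_password: !vault |
  $ANSIBLE_VAULT;1.1;AES256
  30303030303030303030303030303030303030303030303030303030303030303030303030303030
  3030303030303030303030303030303030303030303030303030303030303030

# A file like this is then loaded by the test play roughly as:
# - name: Include vault variables
#   ansible.builtin.include_vars:
#     file: vars/vault-variables.yml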
ok: [managed-node1] TASK [Test is only supported on x86_64] **************************************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:55 Tuesday 08 July 2025 08:52:14 -0400 (0:00:01.154) 0:00:01.227 ********** skipping: [managed-node1] => { "false_condition": "ansible_facts[\"architecture\"] != \"x86_64\"" } TASK [End test] **************************************************************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:62 Tuesday 08 July 2025 08:52:14 -0400 (0:00:00.014) 0:00:01.241 ********** META: end_play conditional evaluated to False, continuing play skipping: [managed-node1] => { "skip_reason": "end_play conditional evaluated to False, continuing play" } MSG: end_play TASK [Run role - do not pull images] ******************************************* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:70 Tuesday 08 July 2025 08:52:14 -0400 (0:00:00.007) 0:00:01.249 ********** included: fedora.linux_system_roles.podman for managed-node1 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Tuesday 08 July 2025 08:52:14 -0400 (0:00:00.048) 0:00:01.297 ********** included: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Tuesday 08 July 2025 08:52:14 -0400 (0:00:00.021) 0:00:01.319 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Tuesday 08 July 2025 08:52:14 -0400 (0:00:00.031) 0:00:01.350 ********** ok: [managed-node1] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Tuesday 08 July 2025 08:52:14 -0400 (0:00:00.437) 0:00:01.788 ********** ok: [managed-node1] => { "ansible_facts": { "__podman_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Tuesday 08 July 2025 08:52:15 -0400 (0:00:00.022) 0:00:01.811 ********** ok: [managed-node1] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Tuesday 08 July 2025 08:52:15 -0400 (0:00:00.372) 0:00:02.184 ********** ok: [managed-node1] => { "ansible_facts": { "__podman_is_transactional": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** 
task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Tuesday 08 July 2025 08:52:15 -0400 (0:00:00.022) 0:00:02.207 ********** ok: [managed-node1] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node1] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node1] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node1] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Tuesday 08 July 2025 08:52:15 -0400 (0:00:00.049) 0:00:02.257 ********** ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Tuesday 08 July 2025 08:52:16 -0400 (0:00:01.198) 0:00:03.455 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Tuesday 08 July 2025 08:52:16 -0400 (0:00:00.068) 0:00:03.524 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages)) | list | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Tuesday 08 July 2025 08:52:16 -0400 (0:00:00.079) 0:00:03.604 ********** skipping: [managed-node1] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Tuesday 08 July 2025 08:52:16 -0400 (0:00:00.100) 0:00:03.704 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if 
reboot is needed and not set] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Tuesday 08 July 2025 08:52:16 -0400 (0:00:00.067) 0:00:03.772 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Tuesday 08 July 2025 08:52:17 -0400 (0:00:00.059) 0:00:03.832 ********** ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.023824", "end": "2025-07-08 08:52:17.460410", "rc": 0, "start": "2025-07-08 08:52:17.436586" } STDOUT: podman version 5.5.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Tuesday 08 July 2025 08:52:17 -0400 (0:00:00.494) 0:00:04.327 ********** ok: [managed-node1] => { "ansible_facts": { "podman_version": "5.5.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Tuesday 08 July 2025 08:52:17 -0400 (0:00:00.034) 0:00:04.362 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Tuesday 08 July 2025 08:52:17 -0400 (0:00:00.034) 0:00:04.396 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Tuesday 08 July 2025 08:52:17 -0400 (0:00:00.060) 0:00:04.456 ********** META: end_host conditional evaluated to False, continuing execution for managed-node1 skipping: [managed-node1] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1" } MSG: end_host conditional evaluated to false, continuing execution for managed-node1 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Tuesday 08 July 2025 08:52:17 -0400 (0:00:00.052) 0:00:04.509 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Tuesday 08 July 2025 08:52:17 -0400 (0:00:00.062) 0:00:04.571 ********** META: end_host conditional evaluated to 
False, continuing execution for managed-node1 skipping: [managed-node1] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1" } MSG: end_host conditional evaluated to false, continuing execution for managed-node1 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Tuesday 08 July 2025 08:52:17 -0400 (0:00:00.043) 0:00:04.614 ********** included: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Tuesday 08 July 2025 08:52:17 -0400 (0:00:00.080) 0:00:04.695 ********** ok: [managed-node1] => { "ansible_facts": { "getent_passwd": { "root": [ "x", "0", "0", "Super User", "/root", "/bin/bash" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Tuesday 08 July 2025 08:52:18 -0400 (0:00:00.589) 0:00:05.284 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Tuesday 08 July 2025 08:52:18 -0400 (0:00:00.055) 0:00:05.340 ********** ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Tuesday 08 July 2025 08:52:18 -0400 (0:00:00.066) 0:00:05.406 ********** ok: [managed-node1] => { "changed": false, "stat": { "atime": 1751978876.9732866, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "fa9845e044ad8d1bfcc68a2c8e62c8d83a1bb20e", "ctime": 1751978868.9392545, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 8668983, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1748217600.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15560, "uid": 0, "version": "3012528632", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Tuesday 08 July 2025 08:52:19 -0400 (0:00:00.413) 0:00:05.820 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for 
user subgids] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Tuesday 08 July 2025 08:52:19 -0400 (0:00:00.033) 0:00:05.854 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Tuesday 08 July 2025 08:52:19 -0400 (0:00:00.040) 0:00:05.894 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Tuesday 08 July 2025 08:52:19 -0400 (0:00:00.051) 0:00:05.945 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Tuesday 08 July 2025 08:52:19 -0400 (0:00:00.054) 0:00:06.000 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Tuesday 08 July 2025 08:52:19 -0400 (0:00:00.053) 0:00:06.054 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Tuesday 08 July 2025 08:52:19 -0400 (0:00:00.049) 0:00:06.103 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Tuesday 08 July 2025 08:52:19 -0400 (0:00:00.049) 0:00:06.153 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Tuesday 08 July 2025 08:52:19 -0400 (0:00:00.051) 0:00:06.204 ********** ok: [managed-node1] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_parent_mode": "0755", "__podman_parent_path": "/etc/containers", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": 
"/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126 Tuesday 08 July 2025 08:52:19 -0400 (0:00:00.086) 0:00:06.290 ********** included: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Tuesday 08 July 2025 08:52:19 -0400 (0:00:00.098) 0:00:06.388 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Tuesday 08 July 2025 08:52:19 -0400 (0:00:00.034) 0:00:06.423 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129 Tuesday 08 July 2025 08:52:19 -0400 (0:00:00.027) 0:00:06.450 ********** included: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Tuesday 08 July 2025 08:52:19 -0400 (0:00:00.054) 0:00:06.505 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Tuesday 08 July 2025 08:52:19 -0400 (0:00:00.027) 0:00:06.533 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132 Tuesday 08 July 2025 08:52:19 -0400 (0:00:00.028) 0:00:06.561 ********** included: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7 Tuesday 08 July 2025 08:52:19 -0400 (0:00:00.055) 0:00:06.617 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15 Tuesday 08 July 2025 08:52:19 -0400 (0:00:00.028) 0:00:06.645 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135 Tuesday 08 July 2025 08:52:19 -0400 (0:00:00.027) 0:00:06.672 ********** included: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8 Tuesday 08 July 2025 08:52:19 -0400 (0:00:00.057) 0:00:06.730 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16 Tuesday 08 July 2025 08:52:19 -0400 (0:00:00.027) 0:00:06.758 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21 Tuesday 08 July 2025 08:52:19 -0400 (0:00:00.037) 0:00:06.795 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27 Tuesday 08 July 2025 08:52:20 -0400 (0:00:00.030) 0:00:06.826 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141 Tuesday 08 July 2025 08:52:20 -0400 (0:00:00.028) 0:00:06.854 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148 Tuesday 08 July 2025 08:52:20 -0400 (0:00:00.026) 0:00:06.880 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: 
/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155 Tuesday 08 July 2025 08:52:20 -0400 (0:00:00.053) 0:00:06.934 ********** ok: [managed-node1] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159 Tuesday 08 July 2025 08:52:20 -0400 (0:00:00.030) 0:00:06.964 ********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168 Tuesday 08 July 2025 08:52:20 -0400 (0:00:00.025) 0:00:06.989 ********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177 Tuesday 08 July 2025 08:52:20 -0400 (0:00:00.025) 0:00:07.015 ********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:184 Tuesday 08 July 2025 08:52:20 -0400 (0:00:00.024) 0:00:07.039 ********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:191 Tuesday 08 July 2025 08:52:20 -0400 (0:00:00.025) 0:00:07.065 ********** included: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Tuesday 08 July 2025 08:52:20 -0400 (0:00:00.069) 0:00:07.134 ********** ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "nopull", "Image": "quay.io/libpod/testimage:20210610" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Tuesday 08 July 2025 08:52:20 -0400 (0:00:00.037) 0:00:07.172 ********** ok: [managed-node1] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": false, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if 
no quadlet spec is given] ***** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Tuesday 08 July 2025 08:52:20 -0400 (0:00:00.034) 0:00:07.206 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Tuesday 08 July 2025 08:52:20 -0400 (0:00:00.027) 0:00:07.234 ********** ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_name": "nopull", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Tuesday 08 July 2025 08:52:20 -0400 (0:00:00.041) 0:00:07.276 ********** included: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Tuesday 08 July 2025 08:52:20 -0400 (0:00:00.053) 0:00:07.329 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Tuesday 08 July 2025 08:52:20 -0400 (0:00:00.030) 0:00:07.360 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Tuesday 08 July 2025 08:52:20 -0400 (0:00:00.032) 0:00:07.393 ********** ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Tuesday 08 July 2025 08:52:20 -0400 (0:00:00.038) 0:00:07.432 ********** ok: [managed-node1] => { "changed": false, "stat": { "atime": 1751978876.9732866, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "fa9845e044ad8d1bfcc68a2c8e62c8d83a1bb20e", "ctime": 1751978868.9392545, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 8668983, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1748217600.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15560, 
"uid": 0, "version": "3012528632", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Tuesday 08 July 2025 08:52:21 -0400 (0:00:00.397) 0:00:07.830 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Tuesday 08 July 2025 08:52:21 -0400 (0:00:00.064) 0:00:07.894 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Tuesday 08 July 2025 08:52:21 -0400 (0:00:00.031) 0:00:07.925 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Tuesday 08 July 2025 08:52:21 -0400 (0:00:00.031) 0:00:07.957 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Tuesday 08 July 2025 08:52:21 -0400 (0:00:00.030) 0:00:07.987 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Tuesday 08 July 2025 08:52:21 -0400 (0:00:00.030) 0:00:08.017 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Tuesday 08 July 2025 08:52:21 -0400 (0:00:00.030) 0:00:08.048 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Tuesday 08 July 2025 08:52:21 -0400 (0:00:00.030) 0:00:08.078 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not 
__podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Tuesday 08 July 2025 08:52:21 -0400 (0:00:00.029) 0:00:08.107 ********** ok: [managed-node1] => { "ansible_facts": { "__podman_activate_systemd_unit": false, "__podman_images_found": [ "quay.io/libpod/testimage:20210610" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "nopull.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Tuesday 08 July 2025 08:52:21 -0400 (0:00:00.047) 0:00:08.155 ********** ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Tuesday 08 July 2025 08:52:21 -0400 (0:00:00.032) 0:00:08.188 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:88 Tuesday 08 July 2025 08:52:21 -0400 (0:00:00.030) 0:00:08.218 ********** ok: [managed-node1] => { "ansible_facts": { "__podman_images": [ "quay.io/libpod/testimage:20210610" ], "__podman_quadlet_file": "/etc/containers/systemd/nopull.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:106 Tuesday 08 July 2025 08:52:21 -0400 (0:00:00.068) 0:00:08.287 ********** ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:113 Tuesday 08 July 2025 08:52:21 -0400 (0:00:00.035) 0:00:08.323 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:117 Tuesday 08 July 2025 08:52:21 -0400 (0:00:00.026) 0:00:08.349 ********** included: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Tuesday 08 
July 2025 08:52:21 -0400 (0:00:00.065) 0:00:08.414 ********** included: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Tuesday 08 July 2025 08:52:21 -0400 (0:00:00.048) 0:00:08.462 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Tuesday 08 July 2025 08:52:21 -0400 (0:00:00.028) 0:00:08.490 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Tuesday 08 July 2025 08:52:21 -0400 (0:00:00.053) 0:00:08.544 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Tuesday 08 July 2025 08:52:21 -0400 (0:00:00.028) 0:00:08.572 ********** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Tuesday 08 July 2025 08:52:21 -0400 (0:00:00.025) 0:00:08.598 ********** skipping: [managed-node1] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Tuesday 08 July 2025 08:52:21 -0400 (0:00:00.033) 0:00:08.631 ********** ok: [managed-node1] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 43, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:50 Tuesday 08 July 2025 08:52:22 -0400 (0:00:00.481) 0:00:09.112 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: 
/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:62 Tuesday 08 July 2025 08:52:22 -0400 (0:00:00.032) 0:00:09.144 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_quadlet_str | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:75 Tuesday 08 July 2025 08:52:22 -0400 (0:00:00.030) 0:00:09.175 ********** changed: [managed-node1] => { "changed": true, "checksum": "670d64fc68a9768edb20cad26df2acc703542d85", "dest": "/etc/containers/systemd/nopull.container", "gid": 0, "group": "root", "md5sum": "cedb6667f6cd1b033fe06e2810fe6b19", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 151, "src": "/root/.ansible/tmp/ansible-tmp-1751979142.423505-17133-38145694146989/.source.container", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:87 Tuesday 08 July 2025 08:52:23 -0400 (0:00:00.798) 0:00:09.974 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_activate_systemd_unit | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:115 Tuesday 08 July 2025 08:52:23 -0400 (0:00:00.032) 0:00:10.006 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_activate_systemd_unit | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:131 Tuesday 08 July 2025 08:52:23 -0400 (0:00:00.034) 0:00:10.041 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_activate_systemd_unit | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:198 Tuesday 08 July 2025 08:52:23 -0400 (0:00:00.036) 0:00:10.077 ********** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:204 Tuesday 08 July 2025 08:52:23 -0400 (0:00:00.025) 0:00:10.103 ********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:213 Tuesday 08 July 2025 08:52:23 -0400 (0:00:00.026) 0:00:10.129 ********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was 
specified for this result", "changed": false } TASK [Verify image not pulled] ************************************************* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:86 Tuesday 08 July 2025 08:52:23 -0400 (0:00:00.041) 0:00:10.171 ********** ok: [managed-node1] => { "changed": false } MSG: All assertions passed TASK [Run role - try to pull bogus image] ************************************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:90 Tuesday 08 July 2025 08:52:23 -0400 (0:00:00.031) 0:00:10.203 ********** included: fedora.linux_system_roles.podman for managed-node1 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Tuesday 08 July 2025 08:52:23 -0400 (0:00:00.089) 0:00:10.293 ********** included: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Tuesday 08 July 2025 08:52:23 -0400 (0:00:00.082) 0:00:10.375 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Tuesday 08 July 2025 08:52:23 -0400 (0:00:00.035) 0:00:10.411 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Tuesday 08 July 2025 08:52:23 -0400 (0:00:00.029) 0:00:10.441 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Tuesday 08 July 2025 08:52:23 -0400 (0:00:00.027) 0:00:10.468 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Tuesday 08 July 2025 08:52:23 -0400 (0:00:00.030) 0:00:10.498 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Tuesday 08 July 2025 08:52:23 -0400 (0:00:00.029) 0:00:10.528 
********** ok: [managed-node1] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node1] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node1] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node1] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Tuesday 08 July 2025 08:52:23 -0400 (0:00:00.063) 0:00:10.592 ********** ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Tuesday 08 July 2025 08:52:24 -0400 (0:00:00.951) 0:00:11.543 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Tuesday 08 July 2025 08:52:24 -0400 (0:00:00.029) 0:00:11.572 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages)) | list | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Tuesday 08 July 2025 08:52:24 -0400 (0:00:00.034) 0:00:11.607 ********** skipping: [managed-node1] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Tuesday 08 July 2025 08:52:24 -0400 (0:00:00.028) 0:00:11.635 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Tuesday 08 July 2025 08:52:24 -0400 
(0:00:00.028) 0:00:11.663 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Tuesday 08 July 2025 08:52:24 -0400 (0:00:00.027) 0:00:11.691 ********** ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.026742", "end": "2025-07-08 08:52:25.227826", "rc": 0, "start": "2025-07-08 08:52:25.201084" } STDOUT: podman version 5.5.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Tuesday 08 July 2025 08:52:25 -0400 (0:00:00.400) 0:00:12.092 ********** ok: [managed-node1] => { "ansible_facts": { "podman_version": "5.5.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Tuesday 08 July 2025 08:52:25 -0400 (0:00:00.033) 0:00:12.125 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Tuesday 08 July 2025 08:52:25 -0400 (0:00:00.062) 0:00:12.187 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Tuesday 08 July 2025 08:52:25 -0400 (0:00:00.032) 0:00:12.220 ********** META: end_host conditional evaluated to False, continuing execution for managed-node1 skipping: [managed-node1] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1" } MSG: end_host conditional evaluated to false, continuing execution for managed-node1 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Tuesday 08 July 2025 08:52:25 -0400 (0:00:00.034) 0:00:12.255 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Tuesday 08 July 2025 08:52:25 -0400 (0:00:00.039) 0:00:12.294 ********** META: end_host conditional evaluated to False, continuing execution for managed-node1 skipping: [managed-node1] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1" } MSG: 
end_host conditional evaluated to false, continuing execution for managed-node1 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Tuesday 08 July 2025 08:52:25 -0400 (0:00:00.043) 0:00:12.338 ********** included: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Tuesday 08 July 2025 08:52:25 -0400 (0:00:00.058) 0:00:12.397 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Tuesday 08 July 2025 08:52:25 -0400 (0:00:00.033) 0:00:12.430 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Tuesday 08 July 2025 08:52:25 -0400 (0:00:00.032) 0:00:12.463 ********** ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Tuesday 08 July 2025 08:52:25 -0400 (0:00:00.039) 0:00:12.502 ********** ok: [managed-node1] => { "changed": false, "stat": { "atime": 1751978876.9732866, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "fa9845e044ad8d1bfcc68a2c8e62c8d83a1bb20e", "ctime": 1751978868.9392545, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 8668983, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1748217600.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15560, "uid": 0, "version": "3012528632", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Tuesday 08 July 2025 08:52:26 -0400 (0:00:00.380) 0:00:12.883 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: 
/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Tuesday 08 July 2025 08:52:26 -0400 (0:00:00.033) 0:00:12.916 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Tuesday 08 July 2025 08:52:26 -0400 (0:00:00.039) 0:00:12.955 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Tuesday 08 July 2025 08:52:26 -0400 (0:00:00.029) 0:00:12.985 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Tuesday 08 July 2025 08:52:26 -0400 (0:00:00.029) 0:00:13.014 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Tuesday 08 July 2025 08:52:26 -0400 (0:00:00.029) 0:00:13.044 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Tuesday 08 July 2025 08:52:26 -0400 (0:00:00.060) 0:00:13.104 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Tuesday 08 July 2025 08:52:26 -0400 (0:00:00.032) 0:00:13.137 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Tuesday 08 July 2025 08:52:26 -0400 (0:00:00.031) 0:00:13.168 ********** ok: [managed-node1] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_parent_mode": "0755", "__podman_parent_path": "/etc/containers", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": 
"/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126 Tuesday 08 July 2025 08:52:26 -0400 (0:00:00.039) 0:00:13.207 ********** included: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Tuesday 08 July 2025 08:52:26 -0400 (0:00:00.056) 0:00:13.263 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Tuesday 08 July 2025 08:52:26 -0400 (0:00:00.028) 0:00:13.291 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129 Tuesday 08 July 2025 08:52:26 -0400 (0:00:00.029) 0:00:13.321 ********** included: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Tuesday 08 July 2025 08:52:26 -0400 (0:00:00.055) 0:00:13.377 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Tuesday 08 July 2025 08:52:26 -0400 (0:00:00.030) 0:00:13.407 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132 Tuesday 08 July 2025 08:52:26 -0400 (0:00:00.029) 0:00:13.436 ********** included: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7 Tuesday 08 July 2025 08:52:26 -0400 (0:00:00.057) 0:00:13.494 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15 Tuesday 08 July 2025 08:52:26 -0400 (0:00:00.030) 0:00:13.524 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135 Tuesday 08 July 2025 08:52:26 -0400 (0:00:00.029) 0:00:13.554 ********** included: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8 Tuesday 08 July 2025 08:52:26 -0400 (0:00:00.059) 0:00:13.614 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16 Tuesday 08 July 2025 08:52:26 -0400 (0:00:00.029) 0:00:13.643 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21 Tuesday 08 July 2025 08:52:26 -0400 (0:00:00.063) 0:00:13.706 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27 Tuesday 08 July 2025 08:52:26 -0400 (0:00:00.029) 0:00:13.736 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141 Tuesday 08 July 2025 08:52:26 -0400 (0:00:00.029) 0:00:13.766 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148 Tuesday 08 July 2025 08:52:26 -0400 (0:00:00.029) 0:00:13.795 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: 
/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155 Tuesday 08 July 2025 08:52:27 -0400 (0:00:00.028) 0:00:13.823 ********** ok: [managed-node1] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159 Tuesday 08 July 2025 08:52:27 -0400 (0:00:00.028) 0:00:13.852 ********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168 Tuesday 08 July 2025 08:52:27 -0400 (0:00:00.025) 0:00:13.877 ********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177 Tuesday 08 July 2025 08:52:27 -0400 (0:00:00.025) 0:00:13.903 ********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:184 Tuesday 08 July 2025 08:52:27 -0400 (0:00:00.024) 0:00:13.928 ********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:191 Tuesday 08 July 2025 08:52:27 -0400 (0:00:00.035) 0:00:13.964 ********** included: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Tuesday 08 July 2025 08:52:27 -0400 (0:00:00.073) 0:00:14.037 ********** ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "bogus", "Image": "this_is_a_bogus_image" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Tuesday 08 July 2025 08:52:27 -0400 (0:00:00.039) 0:00:14.077 ********** ok: [managed-node1] => { "ansible_facts": { "__podman_continue_if_pull_fails": true, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet 
spec is given] ***** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Tuesday 08 July 2025 08:52:27 -0400 (0:00:00.038) 0:00:14.115 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Tuesday 08 July 2025 08:52:27 -0400 (0:00:00.028) 0:00:14.144 ********** ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_name": "bogus", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Tuesday 08 July 2025 08:52:27 -0400 (0:00:00.043) 0:00:14.187 ********** included: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Tuesday 08 July 2025 08:52:27 -0400 (0:00:00.054) 0:00:14.242 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Tuesday 08 July 2025 08:52:27 -0400 (0:00:00.067) 0:00:14.309 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Tuesday 08 July 2025 08:52:27 -0400 (0:00:00.032) 0:00:14.342 ********** ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Tuesday 08 July 2025 08:52:27 -0400 (0:00:00.042) 0:00:14.385 ********** ok: [managed-node1] => { "changed": false, "stat": { "atime": 1751978876.9732866, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "fa9845e044ad8d1bfcc68a2c8e62c8d83a1bb20e", "ctime": 1751978868.9392545, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 8668983, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1748217600.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15560, "uid": 0, 
"version": "3012528632", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Tuesday 08 July 2025 08:52:27 -0400 (0:00:00.386) 0:00:14.772 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Tuesday 08 July 2025 08:52:28 -0400 (0:00:00.031) 0:00:14.803 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Tuesday 08 July 2025 08:52:28 -0400 (0:00:00.029) 0:00:14.833 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Tuesday 08 July 2025 08:52:28 -0400 (0:00:00.030) 0:00:14.863 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Tuesday 08 July 2025 08:52:28 -0400 (0:00:00.030) 0:00:14.893 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Tuesday 08 July 2025 08:52:28 -0400 (0:00:00.029) 0:00:14.923 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Tuesday 08 July 2025 08:52:28 -0400 (0:00:00.030) 0:00:14.953 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Tuesday 08 July 2025 08:52:28 -0400 (0:00:00.040) 0:00:14.993 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not 
__podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Tuesday 08 July 2025 08:52:28 -0400 (0:00:00.032) 0:00:15.026 ********** ok: [managed-node1] => { "ansible_facts": { "__podman_activate_systemd_unit": false, "__podman_images_found": [ "this_is_a_bogus_image" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "bogus.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Tuesday 08 July 2025 08:52:28 -0400 (0:00:00.048) 0:00:15.075 ********** ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Tuesday 08 July 2025 08:52:28 -0400 (0:00:00.031) 0:00:15.106 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:88 Tuesday 08 July 2025 08:52:28 -0400 (0:00:00.031) 0:00:15.137 ********** ok: [managed-node1] => { "ansible_facts": { "__podman_images": [ "this_is_a_bogus_image" ], "__podman_quadlet_file": "/etc/containers/systemd/bogus.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:106 Tuesday 08 July 2025 08:52:28 -0400 (0:00:00.069) 0:00:15.207 ********** ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:113 Tuesday 08 July 2025 08:52:28 -0400 (0:00:00.034) 0:00:15.242 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:117 Tuesday 08 July 2025 08:52:28 -0400 (0:00:00.027) 0:00:15.269 ********** included: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Tuesday 08 July 2025 08:52:28 -0400 
(0:00:00.093) 0:00:15.363 ********** included: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Tuesday 08 July 2025 08:52:28 -0400 (0:00:00.049) 0:00:15.412 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Tuesday 08 July 2025 08:52:28 -0400 (0:00:00.029) 0:00:15.441 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Tuesday 08 July 2025 08:52:28 -0400 (0:00:00.026) 0:00:15.468 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Tuesday 08 July 2025 08:52:28 -0400 (0:00:00.027) 0:00:15.496 ********** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Tuesday 08 July 2025 08:52:28 -0400 (0:00:00.025) 0:00:15.522 ********** ok: [managed-node1] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Tuesday 08 July 2025 08:52:29 -0400 (0:00:00.588) 0:00:16.111 ********** ok: [managed-node1] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 67, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:50 Tuesday 08 July 2025 08:52:29 -0400 (0:00:00.389) 0:00:16.501 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: 
/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:62 Tuesday 08 July 2025 08:52:29 -0400 (0:00:00.032) 0:00:16.533 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_quadlet_str | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:75 Tuesday 08 July 2025 08:52:29 -0400 (0:00:00.031) 0:00:16.565 ********** changed: [managed-node1] => { "changed": true, "checksum": "1d087e679d135214e8ac9ccaf33b2222916efb7f", "dest": "/etc/containers/systemd/bogus.container", "gid": 0, "group": "root", "md5sum": "97480a9a73734d9f8007d2c06e7fed1f", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 138, "src": "/root/.ansible/tmp/ansible-tmp-1751979149.8133175-17320-179092701161425/.source.container", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:87 Tuesday 08 July 2025 08:52:30 -0400 (0:00:00.716) 0:00:17.281 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_activate_systemd_unit | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:115 Tuesday 08 July 2025 08:52:30 -0400 (0:00:00.032) 0:00:17.314 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_activate_systemd_unit | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:131 Tuesday 08 July 2025 08:52:30 -0400 (0:00:00.034) 0:00:17.349 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_activate_systemd_unit | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:198 Tuesday 08 July 2025 08:52:30 -0400 (0:00:00.043) 0:00:17.392 ********** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:204 Tuesday 08 July 2025 08:52:30 -0400 (0:00:00.044) 0:00:17.437 ********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:213 Tuesday 08 July 2025 08:52:30 -0400 (0:00:00.044) 0:00:17.482 ********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' 
was specified for this result", "changed": false } TASK [Verify image not pulled and no error] ************************************ task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:106 Tuesday 08 July 2025 08:52:30 -0400 (0:00:00.046) 0:00:17.528 ********** ok: [managed-node1] => { "changed": false } MSG: All assertions passed TASK [Cleanup] ***************************************************************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:113 Tuesday 08 July 2025 08:52:30 -0400 (0:00:00.084) 0:00:17.612 ********** included: fedora.linux_system_roles.podman for managed-node1 => (item=nopull) included: fedora.linux_system_roles.podman for managed-node1 => (item=bogus) TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Tuesday 08 July 2025 08:52:30 -0400 (0:00:00.159) 0:00:17.772 ********** included: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Tuesday 08 July 2025 08:52:31 -0400 (0:00:00.057) 0:00:17.829 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Tuesday 08 July 2025 08:52:31 -0400 (0:00:00.058) 0:00:17.888 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Tuesday 08 July 2025 08:52:31 -0400 (0:00:00.044) 0:00:17.932 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Tuesday 08 July 2025 08:52:31 -0400 (0:00:00.034) 0:00:17.967 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Tuesday 08 July 2025 08:52:31 -0400 (0:00:00.037) 0:00:18.004 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: 
/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Tuesday 08 July 2025 08:52:31 -0400 (0:00:00.041) 0:00:18.046 ********** [WARNING]: TASK: fedora.linux_system_roles.podman : Set platform/version specific variables: The loop variable 'item' is already in use. You should set the `loop_var` value in the `loop_control` option for the task to something else to avoid variable collisions and unexpected behavior. ok: [managed-node1] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node1] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node1] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node1] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Tuesday 08 July 2025 08:52:31 -0400 (0:00:00.080) 0:00:18.126 ********** ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Tuesday 08 July 2025 08:52:32 -0400 (0:00:01.040) 0:00:19.166 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Tuesday 08 July 2025 08:52:32 -0400 (0:00:00.052) 0:00:19.219 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages)) | list | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Tuesday 08 July 2025 08:52:32 -0400 (0:00:00.061) 0:00:19.280 ********** skipping: [managed-node1] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Tuesday 
08 July 2025 08:52:32 -0400 (0:00:00.099) 0:00:19.380 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Tuesday 08 July 2025 08:52:32 -0400 (0:00:00.052) 0:00:19.432 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Tuesday 08 July 2025 08:52:32 -0400 (0:00:00.046) 0:00:19.479 ********** ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.024370", "end": "2025-07-08 08:52:33.043417", "rc": 0, "start": "2025-07-08 08:52:33.019047" } STDOUT: podman version 5.5.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Tuesday 08 July 2025 08:52:33 -0400 (0:00:00.427) 0:00:19.907 ********** ok: [managed-node1] => { "ansible_facts": { "podman_version": "5.5.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Tuesday 08 July 2025 08:52:33 -0400 (0:00:00.031) 0:00:19.938 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Tuesday 08 July 2025 08:52:33 -0400 (0:00:00.028) 0:00:19.966 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Tuesday 08 July 2025 08:52:33 -0400 (0:00:00.035) 0:00:20.002 ********** META: end_host conditional evaluated to False, continuing execution for managed-node1 skipping: [managed-node1] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1" } MSG: end_host conditional evaluated to false, continuing execution for managed-node1 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Tuesday 08 July 2025 08:52:33 -0400 (0:00:00.050) 0:00:20.052 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package 
version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Tuesday 08 July 2025 08:52:33 -0400 (0:00:00.072) 0:00:20.125 ********** META: end_host conditional evaluated to False, continuing execution for managed-node1 skipping: [managed-node1] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1" } MSG: end_host conditional evaluated to false, continuing execution for managed-node1 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Tuesday 08 July 2025 08:52:33 -0400 (0:00:00.076) 0:00:20.201 ********** included: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Tuesday 08 July 2025 08:52:33 -0400 (0:00:00.101) 0:00:20.303 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Tuesday 08 July 2025 08:52:33 -0400 (0:00:00.051) 0:00:20.354 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Tuesday 08 July 2025 08:52:33 -0400 (0:00:00.055) 0:00:20.409 ********** ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Tuesday 08 July 2025 08:52:33 -0400 (0:00:00.065) 0:00:20.475 ********** ok: [managed-node1] => { "changed": false, "stat": { "atime": 1751978876.9732866, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "fa9845e044ad8d1bfcc68a2c8e62c8d83a1bb20e", "ctime": 1751978868.9392545, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 8668983, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1748217600.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15560, "uid": 0, "version": "3012528632", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: 
/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Tuesday 08 July 2025 08:52:34 -0400 (0:00:00.431) 0:00:20.906 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Tuesday 08 July 2025 08:52:34 -0400 (0:00:00.102) 0:00:21.008 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Tuesday 08 July 2025 08:52:34 -0400 (0:00:00.037) 0:00:21.046 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Tuesday 08 July 2025 08:52:34 -0400 (0:00:00.037) 0:00:21.083 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Tuesday 08 July 2025 08:52:34 -0400 (0:00:00.035) 0:00:21.119 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Tuesday 08 July 2025 08:52:34 -0400 (0:00:00.035) 0:00:21.154 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Tuesday 08 July 2025 08:52:34 -0400 (0:00:00.030) 0:00:21.184 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Tuesday 08 July 2025 08:52:34 -0400 (0:00:00.042) 0:00:21.226 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: 
/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Tuesday 08 July 2025 08:52:34 -0400 (0:00:00.030) 0:00:21.257 ********** ok: [managed-node1] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_parent_mode": "0755", "__podman_parent_path": "/etc/containers", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126 Tuesday 08 July 2025 08:52:34 -0400 (0:00:00.038) 0:00:21.296 ********** included: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Tuesday 08 July 2025 08:52:34 -0400 (0:00:00.062) 0:00:21.358 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Tuesday 08 July 2025 08:52:34 -0400 (0:00:00.048) 0:00:21.406 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129 Tuesday 08 July 2025 08:52:34 -0400 (0:00:00.052) 0:00:21.459 ********** included: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Tuesday 08 July 2025 08:52:34 -0400 (0:00:00.095) 0:00:21.554 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Tuesday 08 July 2025 08:52:34 -0400 (0:00:00.048) 0:00:21.602 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132 Tuesday 08 July 2025 08:52:34 -0400 (0:00:00.045) 0:00:21.648 ********** included: 
/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7 Tuesday 08 July 2025 08:52:34 -0400 (0:00:00.068) 0:00:21.716 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15 Tuesday 08 July 2025 08:52:34 -0400 (0:00:00.074) 0:00:21.791 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135 Tuesday 08 July 2025 08:52:35 -0400 (0:00:00.030) 0:00:21.822 ********** included: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8 Tuesday 08 July 2025 08:52:35 -0400 (0:00:00.060) 0:00:21.882 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16 Tuesday 08 July 2025 08:52:35 -0400 (0:00:00.028) 0:00:21.910 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21 Tuesday 08 July 2025 08:52:35 -0400 (0:00:00.029) 0:00:21.940 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27 Tuesday 08 July 2025 08:52:35 -0400 (0:00:00.028) 0:00:21.968 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141 Tuesday 08 July 2025 08:52:35 -0400 (0:00:00.029) 0:00:21.998 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified 
ports] ************************************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148 Tuesday 08 July 2025 08:52:35 -0400 (0:00:00.028) 0:00:22.026 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155 Tuesday 08 July 2025 08:52:35 -0400 (0:00:00.028) 0:00:22.055 ********** ok: [managed-node1] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159 Tuesday 08 July 2025 08:52:35 -0400 (0:00:00.029) 0:00:22.084 ********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168 Tuesday 08 July 2025 08:52:35 -0400 (0:00:00.025) 0:00:22.110 ********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177 Tuesday 08 July 2025 08:52:35 -0400 (0:00:00.025) 0:00:22.135 ********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:184 Tuesday 08 July 2025 08:52:35 -0400 (0:00:00.024) 0:00:22.160 ********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:191 Tuesday 08 July 2025 08:52:35 -0400 (0:00:00.025) 0:00:22.185 ********** included: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Tuesday 08 July 2025 08:52:35 -0400 (0:00:00.083) 0:00:22.269 ********** ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: 
/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Tuesday 08 July 2025 08:52:35 -0400 (0:00:00.038) 0:00:22.308 ********** ok: [managed-node1] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Tuesday 08 July 2025 08:52:35 -0400 (0:00:00.037) 0:00:22.345 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Tuesday 08 July 2025 08:52:35 -0400 (0:00:00.066) 0:00:22.411 ********** ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_name": "nopull", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Tuesday 08 July 2025 08:52:35 -0400 (0:00:00.045) 0:00:22.457 ********** included: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Tuesday 08 July 2025 08:52:35 -0400 (0:00:00.055) 0:00:22.513 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Tuesday 08 July 2025 08:52:35 -0400 (0:00:00.032) 0:00:22.545 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Tuesday 08 July 2025 08:52:35 -0400 (0:00:00.033) 0:00:22.578 ********** ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Tuesday 08 July 2025 08:52:35 -0400 (0:00:00.040) 0:00:22.618 ********** ok: [managed-node1] => { "changed": false, "stat": { "atime": 1751978876.9732866, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "fa9845e044ad8d1bfcc68a2c8e62c8d83a1bb20e", "ctime": 1751978868.9392545, "dev": 51714, "device_type": 
0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 8668983, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1748217600.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15560, "uid": 0, "version": "3012528632", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Tuesday 08 July 2025 08:52:36 -0400 (0:00:00.385) 0:00:23.004 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Tuesday 08 July 2025 08:52:36 -0400 (0:00:00.033) 0:00:23.037 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Tuesday 08 July 2025 08:52:36 -0400 (0:00:00.032) 0:00:23.070 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Tuesday 08 July 2025 08:52:36 -0400 (0:00:00.032) 0:00:23.102 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Tuesday 08 July 2025 08:52:36 -0400 (0:00:00.032) 0:00:23.134 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Tuesday 08 July 2025 08:52:36 -0400 (0:00:00.030) 0:00:23.165 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Tuesday 08 July 2025 08:52:36 -0400 (0:00:00.031) 0:00:23.196 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not 
__podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Tuesday 08 July 2025 08:52:36 -0400 (0:00:00.029) 0:00:23.226 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Tuesday 08 July 2025 08:52:36 -0400 (0:00:00.037) 0:00:23.263 ********** ok: [managed-node1] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "nopull.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Tuesday 08 July 2025 08:52:36 -0400 (0:00:00.054) 0:00:23.317 ********** ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Tuesday 08 July 2025 08:52:36 -0400 (0:00:00.032) 0:00:23.350 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:88 Tuesday 08 July 2025 08:52:36 -0400 (0:00:00.063) 0:00:23.414 ********** ok: [managed-node1] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/nopull.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:106 Tuesday 08 July 2025 08:52:36 -0400 (0:00:00.072) 0:00:23.486 ********** ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:113 Tuesday 08 July 2025 08:52:36 -0400 (0:00:00.037) 0:00:23.524 ********** included: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Tuesday 08 July 2025 08:52:36 -0400 (0:00:00.071) 0:00:23.595 ********** skipping: [managed-node1] 
=> { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Tuesday 08 July 2025 08:52:36 -0400 (0:00:00.029) 0:00:23.624 ********** ok: [managed-node1] => { "changed": false, "failed_when_result": false } MSG: Could not find the requested service nopull.service: host TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:34 Tuesday 08 July 2025 08:52:37 -0400 (0:00:00.716) 0:00:24.341 ********** ok: [managed-node1] => { "changed": false, "stat": { "atime": 1751979143.1103957, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "670d64fc68a9768edb20cad26df2acc703542d85", "ctime": 1751979143.1123958, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 624951496, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1751979142.7543945, "nlink": 1, "path": "/etc/containers/systemd/nopull.container", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 151, "uid": 0, "version": "394730684", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:39 Tuesday 08 July 2025 08:52:37 -0400 (0:00:00.388) 0:00:24.729 ********** included: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Tuesday 08 July 2025 08:52:37 -0400 (0:00:00.055) 0:00:24.785 ********** ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Tuesday 08 July 2025 08:52:38 -0400 (0:00:00.449) 0:00:25.234 ********** fatal: [managed-node1]: FAILED! => {} MSG: template error while templating string: Could not load "podman_from_ini": 'podman_from_ini'. String: {{ __podman_quadlet_raw.content | b64decode | podman_from_ini }}. Could not load "podman_from_ini": 'podman_from_ini' TASK [Debug3] ****************************************************************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:270 Tuesday 08 July 2025 08:52:38 -0400 (0:00:00.036) 0:00:25.270 ********** fatal: [managed-node1]: FAILED! 
=> { "changed": false, "cmd": "set -x\nset -o pipefail\nexec 1>&2\n#podman volume rm --all\n#podman network prune -f\npodman volume ls\npodman network ls\npodman secret ls\npodman container ls\npodman pod ls\npodman images\nsystemctl list-units | grep quadlet\n", "delta": "0:00:00.169423", "end": "2025-07-08 08:52:38.951320", "rc": 1, "start": "2025-07-08 08:52:38.781897" } STDERR: + set -o pipefail + exec + podman volume ls DRIVER VOLUME NAME + podman network ls NETWORK ID NAME DRIVER 2f259bab93aa podman bridge 90e7467522a5 podman-default-kube-network bridge + podman secret ls ID NAME DRIVER CREATED UPDATED + podman container ls CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES + podman pod ls POD ID NAME STATUS CREATED INFRA ID # OF CONTAINERS + podman images REPOSITORY TAG IMAGE ID CREATED SIZE quay.io/libpod/registry 2.8.2 0030ba3d620c 23 months ago 24.6 MB localhost:5000/libpod/testimage 20210610 9f9ec7f2fdef 4 years ago 7.99 MB quay.io/libpod/testimage 20210610 9f9ec7f2fdef 4 years ago 7.99 MB + systemctl list-units + grep quadlet MSG: non-zero return code TASK [Cleanup user] ************************************************************ task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:299 Tuesday 08 July 2025 08:52:39 -0400 (0:00:00.544) 0:00:25.815 ********** included: fedora.linux_system_roles.podman for managed-node1 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Tuesday 08 July 2025 08:52:39 -0400 (0:00:00.072) 0:00:25.888 ********** included: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Tuesday 08 July 2025 08:52:39 -0400 (0:00:00.047) 0:00:25.935 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Tuesday 08 July 2025 08:52:39 -0400 (0:00:00.035) 0:00:25.971 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Tuesday 08 July 2025 08:52:39 -0400 (0:00:00.064) 0:00:26.035 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Tuesday 08 July 2025 08:52:39 -0400 (0:00:00.030) 0:00:26.066 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" 
} TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Tuesday 08 July 2025 08:52:39 -0400 (0:00:00.029) 0:00:26.095 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Tuesday 08 July 2025 08:52:39 -0400 (0:00:00.028) 0:00:26.124 ********** ok: [managed-node1] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node1] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node1] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node1] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Tuesday 08 July 2025 08:52:39 -0400 (0:00:00.065) 0:00:26.190 ********** ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Tuesday 08 July 2025 08:52:40 -0400 (0:00:00.957) 0:00:27.147 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Tuesday 08 July 2025 08:52:40 -0400 (0:00:00.029) 0:00:27.177 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages)) | list | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Tuesday 08 July 2025 08:52:40 -0400 (0:00:00.035) 0:00:27.213 ********** skipping: [managed-node1] => { 
"false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Tuesday 08 July 2025 08:52:40 -0400 (0:00:00.028) 0:00:27.241 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Tuesday 08 July 2025 08:52:40 -0400 (0:00:00.029) 0:00:27.270 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Tuesday 08 July 2025 08:52:40 -0400 (0:00:00.029) 0:00:27.300 ********** ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.025166", "end": "2025-07-08 08:52:40.834777", "rc": 0, "start": "2025-07-08 08:52:40.809611" } STDOUT: podman version 5.5.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Tuesday 08 July 2025 08:52:40 -0400 (0:00:00.397) 0:00:27.698 ********** ok: [managed-node1] => { "ansible_facts": { "podman_version": "5.5.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Tuesday 08 July 2025 08:52:40 -0400 (0:00:00.033) 0:00:27.731 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Tuesday 08 July 2025 08:52:40 -0400 (0:00:00.028) 0:00:27.760 ********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Tuesday 08 July 2025 08:52:41 -0400 (0:00:00.060) 0:00:27.820 ********** META: end_host conditional evaluated to False, continuing execution for managed-node1 skipping: [managed-node1] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1" } MSG: end_host conditional evaluated to false, continuing execution for managed-node1 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Tuesday 08 July 2025 08:52:41 -0400 (0:00:00.100) 0:00:27.921 ********** skipping: [managed-node1] => { "changed": 
false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Tuesday 08 July 2025 08:52:41 -0400 (0:00:00.061) 0:00:27.982 ********** META: end_host conditional evaluated to False, continuing execution for managed-node1 skipping: [managed-node1] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1" } MSG: end_host conditional evaluated to false, continuing execution for managed-node1 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Tuesday 08 July 2025 08:52:41 -0400 (0:00:00.061) 0:00:28.044 ********** included: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Tuesday 08 July 2025 08:52:41 -0400 (0:00:00.060) 0:00:28.104 ********** ok: [managed-node1] => { "ansible_facts": { "getent_passwd": { "user_quadlet_basic": null } }, "changed": false } MSG: One or more supplied key could not be found in the database. TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Tuesday 08 July 2025 08:52:41 -0400 (0:00:00.392) 0:00:28.496 ********** fatal: [managed-node1]: FAILED! => { "changed": false } MSG: The given podman user [user_quadlet_basic] does not exist - cannot continue TASK [Dump journal] ************************************************************ task path: /tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:336 Tuesday 08 July 2025 08:52:41 -0400 (0:00:00.039) 0:00:28.535 ********** fatal: [managed-node1]: FAILED! => { "changed": false, "cmd": [ "journalctl", "-ex" ], "delta": "0:00:00.037839", "end": "2025-07-08 08:52:42.082861", "failed_when_result": true, "rc": 0, "start": "2025-07-08 08:52:42.045022" } STDOUT: Jul 08 08:49:45 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. ░░ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. ░░ ░░ The job identifier is 5038.
Jul 08 08:49:45 managed-node1 podman[33270]: 2025-07-08 08:49:45.593321911 -0400 EDT m=+0.085104679 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:45 managed-node1 podman[33270]: 2025-07-08 08:49:45.595600043 -0400 EDT m=+0.087382830 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:45 managed-node1 auth_test_1_kube-auth_test_1_kube[33282]: This container is intended for podman CI testing Jul 08 08:49:45 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:45 managed-node1 podman[33286]: 2025-07-08 08:49:45.636439329 -0400 EDT m=+0.028271353 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:45 managed-node1 podman[33286]: 2025-07-08 08:49:45.649856804 -0400 EDT m=+0.041688544 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:45 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. ░░ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. ░░ ░░ The job identifier is 5045.
Jul 08 08:49:45 managed-node1 podman[33286]: 2025-07-08 08:49:45.708861153 -0400 EDT m=+0.100693111 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:45 managed-node1 podman[33286]: 2025-07-08 08:49:45.712686381 -0400 EDT m=+0.104518163 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:45 managed-node1 auth_test_1_kube-auth_test_1_kube[33350]: This container is intended for podman CI testing Jul 08 08:49:45 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:45 managed-node1 podman[33380]: 2025-07-08 08:49:45.751009794 -0400 EDT m=+0.023249277 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:45 managed-node1 podman[33380]: 2025-07-08 08:49:45.76715798 -0400 EDT m=+0.039397338 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:45 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. ░░ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. ░░ ░░ The job identifier is 5052.
Jul 08 08:49:45 managed-node1 podman[33380]: 2025-07-08 08:49:45.82422548 -0400 EDT m=+0.096464922 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:45 managed-node1 auth_test_1_kube-auth_test_1_kube[33439]: This container is intended for podman CI testing Jul 08 08:49:45 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:45 managed-node1 conmon[33439]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events Jul 08 08:49:45 managed-node1 podman[33380]: 2025-07-08 08:49:45.829510409 -0400 EDT m=+0.101749551 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:45 managed-node1 podman[33448]: 2025-07-08 08:49:45.877828923 -0400 EDT m=+0.034075082 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:45 managed-node1 podman[33448]: 2025-07-08 08:49:45.892890013 -0400 EDT m=+0.049136106 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:45 managed-node1 python3.12[33446]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:49:45 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun
container. ░░ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. ░░ ░░ The job identifier is 5059. Jul 08 08:49:45 managed-node1 podman[33448]: 2025-07-08 08:49:45.999158821 -0400 EDT m=+0.155405154 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:46 managed-node1 auth_test_1_kube-auth_test_1_kube[33460]: This container is intended for podman CI testing Jul 08 08:49:46 managed-node1 podman[33448]: 2025-07-08 08:49:46.005068376 -0400 EDT m=+0.161314418 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:46 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:46 managed-node1 podman[33470]: 2025-07-08 08:49:46.051486323 -0400 EDT m=+0.031554578 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:46 managed-node1 podman[33470]: 2025-07-08 08:49:46.066075246 -0400 EDT m=+0.046143244 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:46 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.
░░ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. ░░ ░░ The job identifier is 5066. Jul 08 08:49:46 managed-node1 podman[33470]: 2025-07-08 08:49:46.117152823 -0400 EDT m=+0.097221140 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:46 managed-node1 podman[33470]: 2025-07-08 08:49:46.1198846 -0400 EDT m=+0.099952518 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:46 managed-node1 auth_test_1_kube-auth_test_1_kube[33507]: This container is intended for podman CI testing Jul 08 08:49:46 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:46 managed-node1 podman[33511]: 2025-07-08 08:49:46.15055551 -0400 EDT m=+0.020976997 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:46 managed-node1 podman[33511]: 2025-07-08 08:49:46.163279094 -0400 EDT m=+0.033700461 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:46 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.
░░ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. ░░ ░░ The job identifier is 5073. Jul 08 08:49:46 managed-node1 podman[33511]: 2025-07-08 08:49:46.213217596 -0400 EDT m=+0.083638969 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:46 managed-node1 podman[33511]: 2025-07-08 08:49:46.215501868 -0400 EDT m=+0.085923273 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:46 managed-node1 auth_test_1_kube-auth_test_1_kube[33522]: This container is intended for podman CI testing Jul 08 08:49:46 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:46 managed-node1 podman[33526]: 2025-07-08 08:49:46.244185514 -0400 EDT m=+0.019318491 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:46 managed-node1 podman[33526]: 2025-07-08 08:49:46.256897767 -0400 EDT m=+0.032030708 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:46 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.
░░ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. ░░ ░░ The job identifier is 5080. Jul 08 08:49:46 managed-node1 podman[33526]: 2025-07-08 08:49:46.308998409 -0400 EDT m=+0.084131346 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:46 managed-node1 podman[33526]: 2025-07-08 08:49:46.311242091 -0400 EDT m=+0.086375084 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:46 managed-node1 auth_test_1_kube-auth_test_1_kube[33538]: This container is intended for podman CI testing Jul 08 08:49:46 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.
Jul 08 08:49:46 managed-node1 conmon[33538]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events Jul 08 08:49:46 managed-node1 podman[33542]: 2025-07-08 08:49:46.341750805 -0400 EDT m=+0.021827804 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:46 managed-node1 podman[33542]: 2025-07-08 08:49:46.354633092 -0400 EDT m=+0.034710071 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:46 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. ░░ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. ░░ ░░ The job identifier is 5087. Jul 08 08:49:46 managed-node1 podman[33542]: 2025-07-08 08:49:46.400711535 -0400 EDT m=+0.080788511 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:46 managed-node1 podman[33542]: 2025-07-08 08:49:46.40328957 -0400 EDT m=+0.083366573 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:46 managed-node1 auth_test_1_kube-auth_test_1_kube[33553]: This container is intended for podman CI testing Jul 08 08:49:46 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:46 managed-node1 podman[33557]: 2025-07-08 08:49:46.43593079 -0400 EDT m=+0.019914898 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:46 managed-node1 podman[33557]: 2025-07-08 08:49:46.44840681 -0400 EDT m=+0.032390875 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:46 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5094. Jul 08 08:49:46 managed-node1 podman[33557]: 2025-07-08 08:49:46.501216122 -0400 EDT m=+0.085200201 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:46 managed-node1 podman[33557]: 2025-07-08 08:49:46.504296699 -0400 EDT m=+0.088280830 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:46 managed-node1 auth_test_1_kube-auth_test_1_kube[33568]: This container is intended for podman CI testing Jul 08 08:49:46 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:46 managed-node1 podman[33572]: 2025-07-08 08:49:46.533910603 -0400 EDT m=+0.019233571 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:46 managed-node1 podman[33572]: 2025-07-08 08:49:46.546704422 -0400 EDT m=+0.032027359 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:46 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5101. Jul 08 08:49:46 managed-node1 podman[33572]: 2025-07-08 08:49:46.594679115 -0400 EDT m=+0.080002047 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:46 managed-node1 podman[33572]: 2025-07-08 08:49:46.597331818 -0400 EDT m=+0.082654790 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:46 managed-node1 auth_test_1_kube-auth_test_1_kube[33583]: This container is intended for podman CI testing Jul 08 08:49:46 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:46 managed-node1 podman[33587]: 2025-07-08 08:49:46.629552133 -0400 EDT m=+0.020339325 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:46 managed-node1 podman[33587]: 2025-07-08 08:49:46.641895583 -0400 EDT m=+0.032682716 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:46 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5108. Jul 08 08:49:46 managed-node1 podman[33587]: 2025-07-08 08:49:46.690758245 -0400 EDT m=+0.081545387 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:46 managed-node1 podman[33587]: 2025-07-08 08:49:46.692988727 -0400 EDT m=+0.083775886 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:46 managed-node1 auth_test_1_kube-auth_test_1_kube[33599]: This container is intended for podman CI testing Jul 08 08:49:46 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:46 managed-node1 conmon[33599]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events Jul 08 08:49:46 managed-node1 podman[33603]: 2025-07-08 08:49:46.726894452 -0400 EDT m=+0.020112478 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:46 managed-node1 podman[33603]: 2025-07-08 08:49:46.739575229 -0400 EDT m=+0.032793244 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:46 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. ░░ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. ░░ ░░ The job identifier is 5115.
Jul 08 08:49:46 managed-node1 podman[33603]: 2025-07-08 08:49:46.794088093 -0400 EDT m=+0.087306240 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:46 managed-node1 auth_test_1_kube-auth_test_1_kube[33614]: This container is intended for podman CI testing Jul 08 08:49:46 managed-node1 podman[33603]: 2025-07-08 08:49:46.79814541 -0400 EDT m=+0.091363541 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:46 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:46 managed-node1 podman[33640]: 2025-07-08 08:49:46.842178721 -0400 EDT m=+0.028105369 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:46 managed-node1 podman[33640]: 2025-07-08 08:49:46.856958194 -0400 EDT m=+0.042884619 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:46 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5122. 
Jul 08 08:49:46 managed-node1 podman[33640]: 2025-07-08 08:49:46.907632511 -0400 EDT m=+0.093559004 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:46 managed-node1 podman[33640]: 2025-07-08 08:49:46.909868606 -0400 EDT m=+0.095795091 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:46 managed-node1 auth_test_1_kube-auth_test_1_kube[33695]: This container is intended for podman CI testing Jul 08 08:49:46 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:46 managed-node1 conmon[33695]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events Jul 08 08:49:46 managed-node1 podman[33716]: 2025-07-08 08:49:46.956343788 -0400 EDT m=+0.028484493 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:46 managed-node1 podman[33716]: 2025-07-08 08:49:46.971819147 -0400 EDT m=+0.043959658 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:47 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5129. Jul 08 08:49:47 managed-node1 podman[33716]: 2025-07-08 08:49:47.044987412 -0400 EDT m=+0.117128101 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:47 managed-node1 auth_test_1_kube-auth_test_1_kube[33774]: This container is intended for podman CI testing Jul 08 08:49:47 managed-node1 podman[33716]: 2025-07-08 08:49:47.0492363 -0400 EDT m=+0.121376849 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:47 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:47 managed-node1 podman[33778]: 2025-07-08 08:49:47.098897718 -0400 EDT m=+0.032891859 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:47 managed-node1 podman[33778]: 2025-07-08 08:49:47.113649419 -0400 EDT m=+0.047643361 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:47 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5136. Jul 08 08:49:47 managed-node1 podman[33778]: 2025-07-08 08:49:47.172175405 -0400 EDT m=+0.106169573 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:47 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:47 managed-node1 podman[33778]: 2025-07-08 08:49:47.177509501 -0400 EDT m=+0.111503424 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:47 managed-node1 conmon[33790]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events Jul 08 08:49:47 managed-node1 auth_test_1_kube-auth_test_1_kube[33790]: This container is intended for podman CI testing Jul 08 08:49:47 managed-node1 python3.12[33772]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:49:47 managed-node1 podman[33794]: 2025-07-08 08:49:47.223151006 -0400 EDT m=+0.032410745 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:47 managed-node1 podman[33794]: 2025-07-08 08:49:47.237208679 -0400 EDT m=+0.046468524 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, 
PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:47 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5143. Jul 08 08:49:47 managed-node1 podman[33794]: 2025-07-08 08:49:47.296149541 -0400 EDT m=+0.105409245 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:47 managed-node1 podman[33794]: 2025-07-08 08:49:47.298412557 -0400 EDT m=+0.107672266 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:47 managed-node1 auth_test_1_kube-auth_test_1_kube[33832]: This container is intended for podman CI testing Jul 08 08:49:47 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. 
Jul 08 08:49:47 managed-node1 podman[33836]: 2025-07-08 08:49:47.331266377 -0400 EDT m=+0.020000340 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:47 managed-node1 podman[33836]: 2025-07-08 08:49:47.34470389 -0400 EDT m=+0.033437826 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:47 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5150. Jul 08 08:49:47 managed-node1 podman[33836]: 2025-07-08 08:49:47.404422265 -0400 EDT m=+0.093156201 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:47 managed-node1 podman[33836]: 2025-07-08 08:49:47.406943453 -0400 EDT m=+0.095677421 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:47 managed-node1 auth_test_1_kube-auth_test_1_kube[33847]: This container is intended for podman CI testing Jul 08 08:49:47 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. 
Jul 08 08:49:47 managed-node1 podman[33851]: 2025-07-08 08:49:47.436908797 -0400 EDT m=+0.021001065 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:47 managed-node1 podman[33851]: 2025-07-08 08:49:47.449365459 -0400 EDT m=+0.033457632 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:47 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5157. Jul 08 08:49:47 managed-node1 podman[33851]: 2025-07-08 08:49:47.499453503 -0400 EDT m=+0.083545709 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:47 managed-node1 podman[33851]: 2025-07-08 08:49:47.5018498 -0400 EDT m=+0.085942306 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:47 managed-node1 auth_test_1_kube-auth_test_1_kube[33863]: This container is intended for podman CI testing Jul 08 08:49:47 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. 
Jul 08 08:49:47 managed-node1 podman[33867]: 2025-07-08 08:49:47.534736138 -0400 EDT m=+0.020486325 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:47 managed-node1 podman[33867]: 2025-07-08 08:49:47.547230152 -0400 EDT m=+0.032980251 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:47 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5164. Jul 08 08:49:47 managed-node1 podman[33867]: 2025-07-08 08:49:47.597133016 -0400 EDT m=+0.082883207 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:47 managed-node1 podman[33867]: 2025-07-08 08:49:47.599409134 -0400 EDT m=+0.085159269 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:47 managed-node1 auth_test_1_kube-auth_test_1_kube[33878]: This container is intended for podman CI testing Jul 08 08:49:47 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. 
Jul 08 08:49:47 managed-node1 podman[33882]: 2025-07-08 08:49:47.634721629 -0400 EDT m=+0.022523862 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:47 managed-node1 podman[33882]: 2025-07-08 08:49:47.646999059 -0400 EDT m=+0.034801249 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:47 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5171. Jul 08 08:49:47 managed-node1 podman[33882]: 2025-07-08 08:49:47.695847177 -0400 EDT m=+0.083649362 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:47 managed-node1 podman[33882]: 2025-07-08 08:49:47.698223389 -0400 EDT m=+0.086025604 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:47 managed-node1 auth_test_1_kube-auth_test_1_kube[33894]: This container is intended for podman CI testing Jul 08 08:49:47 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. 
Jul 08 08:49:47 managed-node1 conmon[33894]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events Jul 08 08:49:47 managed-node1 podman[33898]: 2025-07-08 08:49:47.729215233 -0400 EDT m=+0.019668273 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:47 managed-node1 podman[33898]: 2025-07-08 08:49:47.741919226 -0400 EDT m=+0.032372173 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:47 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5178. Jul 08 08:49:47 managed-node1 podman[33898]: 2025-07-08 08:49:47.79163482 -0400 EDT m=+0.082087793 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:47 managed-node1 podman[33898]: 2025-07-08 08:49:47.793910139 -0400 EDT m=+0.084363124 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:47 managed-node1 auth_test_1_kube-auth_test_1_kube[33909]: This container is intended for podman CI testing Jul 08 08:49:47 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:47 managed-node1 podman[33913]: 2025-07-08 08:49:47.824136159 -0400 EDT m=+0.020193567 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:47 managed-node1 podman[33913]: 2025-07-08 08:49:47.837239421 -0400 EDT m=+0.033296646 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:47 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5185. Jul 08 08:49:47 managed-node1 podman[33913]: 2025-07-08 08:49:47.883698497 -0400 EDT m=+0.079755730 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:47 managed-node1 podman[33913]: 2025-07-08 08:49:47.885994806 -0400 EDT m=+0.082052053 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:47 managed-node1 auth_test_1_kube-auth_test_1_kube[33925]: This container is intended for podman CI testing Jul 08 08:49:47 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:47 managed-node1 podman[33929]: 2025-07-08 08:49:47.920128655 -0400 EDT m=+0.020019483 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:47 managed-node1 podman[33929]: 2025-07-08 08:49:47.932768661 -0400 EDT m=+0.032659356 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:47 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5192. Jul 08 08:49:47 managed-node1 podman[33929]: 2025-07-08 08:49:47.97598732 -0400 EDT m=+0.075878022 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:47 managed-node1 podman[33929]: 2025-07-08 08:49:47.978595153 -0400 EDT m=+0.078485910 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:47 managed-node1 auth_test_1_kube-auth_test_1_kube[33940]: This container is intended for podman CI testing Jul 08 08:49:47 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:48 managed-node1 podman[33944]: 2025-07-08 08:49:48.011452469 -0400 EDT m=+0.020001856 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:48 managed-node1 podman[33944]: 2025-07-08 08:49:48.024031825 -0400 EDT m=+0.032581173 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:48 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. ░░ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. ░░ ░░ The job identifier is 5199. Jul 08 08:49:48 managed-node1 podman[33944]: 2025-07-08 08:49:48.077377205 -0400 EDT m=+0.085926587 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:48 managed-node1 podman[33944]: 2025-07-08 08:49:48.080239495 -0400 EDT m=+0.088788882 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:48 managed-node1 auth_test_1_kube-auth_test_1_kube[33955]: This container is intended for podman CI testing Jul 08 08:49:48 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:48 managed-node1 podman[33959]: 2025-07-08 08:49:48.114038805 -0400 EDT m=+0.020085840 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:48 managed-node1 podman[33959]: 2025-07-08 08:49:48.126637728 -0400 EDT m=+0.032684732 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:48 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. ░░ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. ░░ ░░ The job identifier is 5206. Jul 08 08:49:48 managed-node1 podman[33959]: 2025-07-08 08:49:48.175852831 -0400 EDT m=+0.081899828 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:48 managed-node1 auth_test_1_kube-auth_test_1_kube[33970]: This container is intended for podman CI testing Jul 08 08:49:48 managed-node1 podman[33959]: 2025-07-08 08:49:48.17838063 -0400 EDT m=+0.084427648 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:48 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:48 managed-node1 podman[33974]: 2025-07-08 08:49:48.209443354 -0400 EDT m=+0.019149410 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:48 managed-node1 podman[33974]: 2025-07-08 08:49:48.221899945 -0400 EDT m=+0.031605950 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:48 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. ░░ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. ░░ ░░ The job identifier is 5213. Jul 08 08:49:48 managed-node1 podman[33974]: 2025-07-08 08:49:48.26397088 -0400 EDT m=+0.073676878 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:48 managed-node1 podman[33974]: 2025-07-08 08:49:48.266185294 -0400 EDT m=+0.075891305 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:48 managed-node1 auth_test_1_kube-auth_test_1_kube[33987]: This container is intended for podman CI testing Jul 08 08:49:48 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:48 managed-node1 conmon[33987]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events Jul 08 08:49:48 managed-node1 podman[33991]: 2025-07-08 08:49:48.2997921 -0400 EDT m=+0.020653478 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:48 managed-node1 podman[33991]: 2025-07-08 08:49:48.312964173 -0400 EDT m=+0.033825456 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:48 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. ░░ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. ░░ ░░ The job identifier is 5220. 
Jul 08 08:49:48 managed-node1 podman[33991]: 2025-07-08 08:49:48.366740991 -0400 EDT m=+0.087602299 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:48 managed-node1 auth_test_1_kube-auth_test_1_kube[34002]: This container is intended for podman CI testing Jul 08 08:49:48 managed-node1 podman[33991]: 2025-07-08 08:49:48.369396108 -0400 EDT m=+0.090257515 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:48 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:48 managed-node1 podman[34006]: 2025-07-08 08:49:48.401963754 -0400 EDT m=+0.019984602 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:48 managed-node1 podman[34006]: 2025-07-08 08:49:48.414636071 -0400 EDT m=+0.032656853 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:48 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5227. 
Jul 08 08:49:48 managed-node1 podman[34006]: 2025-07-08 08:49:48.463279998 -0400 EDT m=+0.081300798 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:48 managed-node1 auth_test_1_kube-auth_test_1_kube[34017]: This container is intended for podman CI testing Jul 08 08:49:48 managed-node1 podman[34006]: 2025-07-08 08:49:48.465810417 -0400 EDT m=+0.083831250 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:48 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:48 managed-node1 podman[34021]: 2025-07-08 08:49:48.497279996 -0400 EDT m=+0.018834447 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:48 managed-node1 podman[34021]: 2025-07-08 08:49:48.510078876 -0400 EDT m=+0.031633279 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:48 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5234. 
Jul 08 08:49:48 managed-node1 podman[34021]: 2025-07-08 08:49:48.562758804 -0400 EDT m=+0.084313210 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:48 managed-node1 podman[34021]: 2025-07-08 08:49:48.565079639 -0400 EDT m=+0.086634073 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:48 managed-node1 auth_test_1_kube-auth_test_1_kube[34032]: This container is intended for podman CI testing Jul 08 08:49:48 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:48 managed-node1 podman[34036]: 2025-07-08 08:49:48.597345408 -0400 EDT m=+0.019006558 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:48 managed-node1 podman[34036]: 2025-07-08 08:49:48.609878343 -0400 EDT m=+0.031539338 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:48 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5241. 
Jul 08 08:49:48 managed-node1 podman[34036]: 2025-07-08 08:49:48.651001519 -0400 EDT m=+0.072662517 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:48 managed-node1 podman[34036]: 2025-07-08 08:49:48.653626332 -0400 EDT m=+0.075287365 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:48 managed-node1 auth_test_1_kube-auth_test_1_kube[34047]: This container is intended for podman CI testing Jul 08 08:49:48 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:48 managed-node1 podman[34051]: 2025-07-08 08:49:48.686199905 -0400 EDT m=+0.020077534 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:48 managed-node1 podman[34051]: 2025-07-08 08:49:48.698857155 -0400 EDT m=+0.032734721 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:48 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5248. 
Jul 08 08:49:48 managed-node1 podman[34051]: 2025-07-08 08:49:48.75022344 -0400 EDT m=+0.084101010 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:48 managed-node1 podman[34051]: 2025-07-08 08:49:48.752541475 -0400 EDT m=+0.086419071 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:48 managed-node1 auth_test_1_kube-auth_test_1_kube[34063]: This container is intended for podman CI testing Jul 08 08:49:48 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:48 managed-node1 podman[34067]: 2025-07-08 08:49:48.782506253 -0400 EDT m=+0.020585418 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:48 managed-node1 podman[34067]: 2025-07-08 08:49:48.794763903 -0400 EDT m=+0.032843049 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:48 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5255. 
Jul 08 08:49:48 managed-node1 podman[34067]: 2025-07-08 08:49:48.839529999 -0400 EDT m=+0.077609197 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:48 managed-node1 podman[34067]: 2025-07-08 08:49:48.841788624 -0400 EDT m=+0.079867778 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:48 managed-node1 auth_test_1_kube-auth_test_1_kube[34079]: This container is intended for podman CI testing Jul 08 08:49:48 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:48 managed-node1 podman[34083]: 2025-07-08 08:49:48.873954077 -0400 EDT m=+0.019815458 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:48 managed-node1 podman[34083]: 2025-07-08 08:49:48.886952201 -0400 EDT m=+0.032813564 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:48 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5262. 
Jul 08 08:49:48 managed-node1 podman[34083]: 2025-07-08 08:49:48.939018792 -0400 EDT m=+0.084880219 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:48 managed-node1 podman[34083]: 2025-07-08 08:49:48.941284488 -0400 EDT m=+0.087145886 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:48 managed-node1 auth_test_1_kube-auth_test_1_kube[34094]: This container is intended for podman CI testing Jul 08 08:49:48 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:48 managed-node1 podman[34098]: 2025-07-08 08:49:48.970114144 -0400 EDT m=+0.019376077 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:48 managed-node1 podman[34098]: 2025-07-08 08:49:48.982705486 -0400 EDT m=+0.031967451 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:49 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5269. 
Jul 08 08:49:49 managed-node1 podman[34098]: 2025-07-08 08:49:49.034115159 -0400 EDT m=+0.083377181 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:49 managed-node1 podman[34098]: 2025-07-08 08:49:49.036373194 -0400 EDT m=+0.085635165 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:49 managed-node1 auth_test_1_kube-auth_test_1_kube[34110]: This container is intended for podman CI testing Jul 08 08:49:49 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:49 managed-node1 podman[34114]: 2025-07-08 08:49:49.068177986 -0400 EDT m=+0.018833378 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:49 managed-node1 podman[34114]: 2025-07-08 08:49:49.080844182 -0400 EDT m=+0.031499518 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:49 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5276. 
Jul 08 08:49:49 managed-node1 podman[34114]: 2025-07-08 08:49:49.132890916 -0400 EDT m=+0.083546268 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:49 managed-node1 podman[34114]: 2025-07-08 08:49:49.135916047 -0400 EDT m=+0.086571507 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:49 managed-node1 auth_test_1_kube-auth_test_1_kube[34125]: This container is intended for podman CI testing Jul 08 08:49:49 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:49 managed-node1 podman[34130]: 2025-07-08 08:49:49.169663613 -0400 EDT m=+0.023219464 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:49 managed-node1 podman[34130]: 2025-07-08 08:49:49.182757517 -0400 EDT m=+0.036313167 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:49 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5283. 
Jul 08 08:49:49 managed-node1 podman[34130]: 2025-07-08 08:49:49.226407952 -0400 EDT m=+0.079963608 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:49 managed-node1 auth_test_1_kube-auth_test_1_kube[34141]: This container is intended for podman CI testing Jul 08 08:49:49 managed-node1 podman[34130]: 2025-07-08 08:49:49.229312932 -0400 EDT m=+0.082868620 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:49 managed-node1 conmon[34141]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events Jul 08 08:49:49 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:49 managed-node1 podman[34145]: 2025-07-08 08:49:49.259028664 -0400 EDT m=+0.020826764 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:49 managed-node1 podman[34145]: 2025-07-08 08:49:49.271800703 -0400 EDT m=+0.033598804 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:49 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. 
░░ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. ░░ ░░ The job identifier is 5290. Jul 08 08:49:49 managed-node1 podman[34145]: 2025-07-08 08:49:49.320079906 -0400 EDT m=+0.081878046 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:49 managed-node1 podman[34145]: 2025-07-08 08:49:49.322360297 -0400 EDT m=+0.084158397 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:49 managed-node1 auth_test_1_kube-auth_test_1_kube[34158]: This container is intended for podman CI testing Jul 08 08:49:49 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. 
Jul 08 08:49:49 managed-node1 conmon[34158]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events Jul 08 08:49:49 managed-node1 podman[34162]: 2025-07-08 08:49:49.354181891 -0400 EDT m=+0.019486538 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:49 managed-node1 podman[34162]: 2025-07-08 08:49:49.366999551 -0400 EDT m=+0.032304118 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:49 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5297. Jul 08 08:49:49 managed-node1 podman[34162]: 2025-07-08 08:49:49.412204529 -0400 EDT m=+0.077509139 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:49 managed-node1 podman[34162]: 2025-07-08 08:49:49.414519645 -0400 EDT m=+0.079824263 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:49 managed-node1 auth_test_1_kube-auth_test_1_kube[34174]: This container is intended for podman CI testing Jul 08 08:49:49 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:49 managed-node1 podman[34178]: 2025-07-08 08:49:49.448006888 -0400 EDT m=+0.020382843 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:49 managed-node1 podman[34178]: 2025-07-08 08:49:49.460572271 -0400 EDT m=+0.032948211 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:49 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. ░░ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. ░░ ░░ The job identifier is 5304. Jul 08 08:49:49 managed-node1 podman[34178]: 2025-07-08 08:49:49.504582378 -0400 EDT m=+0.076958491 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:49 managed-node1 podman[34178]: 2025-07-08 08:49:49.508391375 -0400 EDT m=+0.080767425 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:49 managed-node1 auth_test_1_kube-auth_test_1_kube[34190]: This container is intended for podman CI testing Jul 08 08:49:49 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:49 managed-node1 podman[34202]: 2025-07-08 08:49:49.5517187 -0400 EDT m=+0.025513816 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:49 managed-node1 podman[34202]: 2025-07-08 08:49:49.565640744 -0400 EDT m=+0.039435731 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:49 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. ░░ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. ░░ ░░ The job identifier is 5311. Jul 08 08:49:49 managed-node1 podman[34202]: 2025-07-08 08:49:49.620244581 -0400 EDT m=+0.094039639 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:49 managed-node1 podman[34202]: 2025-07-08 08:49:49.622807718 -0400 EDT m=+0.096602697 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:49 managed-node1 auth_test_1_kube-auth_test_1_kube[34263]: This container is intended for podman CI testing Jul 08 08:49:49 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:49 managed-node1 podman[34288]: 2025-07-08 08:49:49.672425537 -0400 EDT m=+0.036848435 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:49 managed-node1 podman[34288]: 2025-07-08 08:49:49.68658677 -0400 EDT m=+0.051009922 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:49 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. ░░ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. ░░ ░░ The job identifier is 5318. Jul 08 08:49:49 managed-node1 podman[34288]: 2025-07-08 08:49:49.756107113 -0400 EDT m=+0.120530428 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:49 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. 
Jul 08 08:49:49 managed-node1 auth_test_1_kube-auth_test_1_kube[34351]: This container is intended for podman CI testing Jul 08 08:49:49 managed-node1 conmon[34351]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events Jul 08 08:49:49 managed-node1 podman[34288]: 2025-07-08 08:49:49.762543671 -0400 EDT m=+0.126966670 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:49 managed-node1 podman[34355]: 2025-07-08 08:49:49.81243902 -0400 EDT m=+0.035935072 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:49 managed-node1 podman[34355]: 2025-07-08 08:49:49.827940145 -0400 EDT m=+0.051436097 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:49 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5325. 
Jul 08 08:49:49 managed-node1 python3.12[34349]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:49:49 managed-node1 podman[34355]: 2025-07-08 08:49:49.90705641 -0400 EDT m=+0.130552597 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:49 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:49 managed-node1 podman[34355]: 2025-07-08 08:49:49.912203202 -0400 EDT m=+0.135699177 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:49 managed-node1 auth_test_1_kube-auth_test_1_kube[34367]: This container is intended for podman CI testing Jul 08 08:49:49 managed-node1 conmon[34367]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events Jul 08 08:49:49 managed-node1 podman[34373]: 2025-07-08 08:49:49.952833053 -0400 EDT m=+0.025054188 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:49 managed-node1 podman[34373]: 2025-07-08 08:49:49.966408755 -0400 EDT m=+0.038629636 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:50 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5332. Jul 08 08:49:50 managed-node1 podman[34373]: 2025-07-08 08:49:50.01387811 -0400 EDT m=+0.086099048 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:50 managed-node1 podman[34373]: 2025-07-08 08:49:50.016837306 -0400 EDT m=+0.089058241 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:50 managed-node1 auth_test_1_kube-auth_test_1_kube[34409]: This container is intended for podman CI testing Jul 08 08:49:50 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. 
Jul 08 08:49:50 managed-node1 conmon[34409]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events Jul 08 08:49:50 managed-node1 podman[34413]: 2025-07-08 08:49:50.044603388 -0400 EDT m=+0.018237094 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:50 managed-node1 podman[34413]: 2025-07-08 08:49:50.057419606 -0400 EDT m=+0.031053305 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:50 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5339. Jul 08 08:49:50 managed-node1 podman[34413]: 2025-07-08 08:49:50.106411777 -0400 EDT m=+0.080045470 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:50 managed-node1 podman[34413]: 2025-07-08 08:49:50.109023029 -0400 EDT m=+0.082656864 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:50 managed-node1 auth_test_1_kube-auth_test_1_kube[34425]: This container is intended for podman CI testing Jul 08 08:49:50 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:50 managed-node1 podman[34429]: 2025-07-08 08:49:50.14027844 -0400 EDT m=+0.021282662 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:50 managed-node1 podman[34429]: 2025-07-08 08:49:50.152958137 -0400 EDT m=+0.033962260 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:50 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5346. Jul 08 08:49:50 managed-node1 podman[34429]: 2025-07-08 08:49:50.198946969 -0400 EDT m=+0.079951112 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:50 managed-node1 podman[34429]: 2025-07-08 08:49:50.201247195 -0400 EDT m=+0.082251363 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:50 managed-node1 auth_test_1_kube-auth_test_1_kube[34441]: This container is intended for podman CI testing Jul 08 08:49:50 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:50 managed-node1 podman[34445]: 2025-07-08 08:49:50.232670019 -0400 EDT m=+0.019042210 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:50 managed-node1 podman[34445]: 2025-07-08 08:49:50.245569477 -0400 EDT m=+0.031941586 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:50 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5353. Jul 08 08:49:50 managed-node1 podman[34445]: 2025-07-08 08:49:50.289153121 -0400 EDT m=+0.075525288 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:50 managed-node1 podman[34445]: 2025-07-08 08:49:50.29144807 -0400 EDT m=+0.077820446 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:50 managed-node1 auth_test_1_kube-auth_test_1_kube[34457]: This container is intended for podman CI testing Jul 08 08:49:50 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:50 managed-node1 conmon[34457]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events Jul 08 08:49:50 managed-node1 podman[34461]: 2025-07-08 08:49:50.321324859 -0400 EDT m=+0.020135867 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:50 managed-node1 podman[34461]: 2025-07-08 08:49:50.33434929 -0400 EDT m=+0.033160267 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:50 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5360. 
Jul 08 08:49:50 managed-node1 podman[34461]: 2025-07-08 08:49:50.378924546 -0400 EDT m=+0.077735626 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:50 managed-node1 podman[34461]: 2025-07-08 08:49:50.381208079 -0400 EDT m=+0.080019250 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:50 managed-node1 auth_test_1_kube-auth_test_1_kube[34473]: This container is intended for podman CI testing Jul 08 08:49:50 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:50 managed-node1 conmon[34473]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events Jul 08 08:49:50 managed-node1 podman[34477]: 2025-07-08 08:49:50.413602604 -0400 EDT m=+0.020596862 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:50 managed-node1 podman[34477]: 2025-07-08 08:49:50.426273388 -0400 EDT m=+0.033267586 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:50 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5367. Jul 08 08:49:50 managed-node1 podman[34477]: 2025-07-08 08:49:50.482970616 -0400 EDT m=+0.089964814 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:50 managed-node1 podman[34477]: 2025-07-08 08:49:50.486156553 -0400 EDT m=+0.093151125 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:50 managed-node1 auth_test_1_kube-auth_test_1_kube[34488]: This container is intended for podman CI testing Jul 08 08:49:50 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:50 managed-node1 podman[34492]: 2025-07-08 08:49:50.515366939 -0400 EDT m=+0.019844643 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:50 managed-node1 podman[34492]: 2025-07-08 08:49:50.527762341 -0400 EDT m=+0.032240027 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:50 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5374. Jul 08 08:49:50 managed-node1 podman[34492]: 2025-07-08 08:49:50.574853568 -0400 EDT m=+0.079331256 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:50 managed-node1 podman[34492]: 2025-07-08 08:49:50.577156975 -0400 EDT m=+0.081634701 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:50 managed-node1 auth_test_1_kube-auth_test_1_kube[34503]: This container is intended for podman CI testing Jul 08 08:49:50 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:50 managed-node1 podman[34507]: 2025-07-08 08:49:50.610108896 -0400 EDT m=+0.019722511 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:50 managed-node1 podman[34507]: 2025-07-08 08:49:50.622705094 -0400 EDT m=+0.032318590 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:50 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5381. Jul 08 08:49:50 managed-node1 podman[34507]: 2025-07-08 08:49:50.668869723 -0400 EDT m=+0.078483353 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:50 managed-node1 podman[34507]: 2025-07-08 08:49:50.672170263 -0400 EDT m=+0.081783833 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:50 managed-node1 auth_test_1_kube-auth_test_1_kube[34518]: This container is intended for podman CI testing Jul 08 08:49:50 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:50 managed-node1 podman[34522]: 2025-07-08 08:49:50.707159048 -0400 EDT m=+0.022341059 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:50 managed-node1 podman[34522]: 2025-07-08 08:49:50.719997876 -0400 EDT m=+0.035179885 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:50 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5388. Jul 08 08:49:50 managed-node1 podman[34522]: 2025-07-08 08:49:50.763582018 -0400 EDT m=+0.078764019 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:50 managed-node1 podman[34522]: 2025-07-08 08:49:50.765810113 -0400 EDT m=+0.080992132 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:50 managed-node1 auth_test_1_kube-auth_test_1_kube[34533]: This container is intended for podman CI testing Jul 08 08:49:50 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:50 managed-node1 podman[34537]: 2025-07-08 08:49:50.79394865 -0400 EDT m=+0.018973088 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:50 managed-node1 podman[34537]: 2025-07-08 08:49:50.807084965 -0400 EDT m=+0.032109455 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:50 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5395. Jul 08 08:49:50 managed-node1 podman[34537]: 2025-07-08 08:49:50.851272783 -0400 EDT m=+0.076297235 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:50 managed-node1 podman[34537]: 2025-07-08 08:49:50.853483802 -0400 EDT m=+0.078508269 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:50 managed-node1 auth_test_1_kube-auth_test_1_kube[34548]: This container is intended for podman CI testing Jul 08 08:49:50 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:50 managed-node1 podman[34552]: 2025-07-08 08:49:50.885571405 -0400 EDT m=+0.019189971 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:50 managed-node1 podman[34552]: 2025-07-08 08:49:50.898112952 -0400 EDT m=+0.031731451 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:50 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5402. Jul 08 08:49:50 managed-node1 podman[34552]: 2025-07-08 08:49:50.944736716 -0400 EDT m=+0.078355266 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:50 managed-node1 podman[34552]: 2025-07-08 08:49:50.947515836 -0400 EDT m=+0.081134365 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:50 managed-node1 auth_test_1_kube-auth_test_1_kube[34563]: This container is intended for podman CI testing Jul 08 08:49:50 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:50 managed-node1 podman[34567]: 2025-07-08 08:49:50.977984989 -0400 EDT m=+0.020979643 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:50 managed-node1 podman[34567]: 2025-07-08 08:49:50.990807822 -0400 EDT m=+0.033802272 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:51 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5409. Jul 08 08:49:51 managed-node1 podman[34567]: 2025-07-08 08:49:51.036679245 -0400 EDT m=+0.079673683 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:51 managed-node1 podman[34567]: 2025-07-08 08:49:51.038959012 -0400 EDT m=+0.081953462 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:51 managed-node1 auth_test_1_kube-auth_test_1_kube[34578]: This container is intended for podman CI testing Jul 08 08:49:51 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:51 managed-node1 podman[34582]: 2025-07-08 08:49:51.070108649 -0400 EDT m=+0.021434065 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:51 managed-node1 podman[34582]: 2025-07-08 08:49:51.082800889 -0400 EDT m=+0.034126324 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:51 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5416. Jul 08 08:49:51 managed-node1 podman[34582]: 2025-07-08 08:49:51.134760642 -0400 EDT m=+0.086086140 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:51 managed-node1 podman[34582]: 2025-07-08 08:49:51.137199535 -0400 EDT m=+0.088525117 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:51 managed-node1 auth_test_1_kube-auth_test_1_kube[34593]: This container is intended for podman CI testing Jul 08 08:49:51 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:51 managed-node1 podman[34597]: 2025-07-08 08:49:51.170718179 -0400 EDT m=+0.020132858 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:51 managed-node1 podman[34597]: 2025-07-08 08:49:51.18270693 -0400 EDT m=+0.032121342 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:51 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5423. Jul 08 08:49:51 managed-node1 podman[34597]: 2025-07-08 08:49:51.229909139 -0400 EDT m=+0.079323558 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:51 managed-node1 podman[34597]: 2025-07-08 08:49:51.232549006 -0400 EDT m=+0.081963536 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:51 managed-node1 auth_test_1_kube-auth_test_1_kube[34608]: This container is intended for podman CI testing Jul 08 08:49:51 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:51 managed-node1 podman[34612]: 2025-07-08 08:49:51.261318869 -0400 EDT m=+0.019791525 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:51 managed-node1 podman[34612]: 2025-07-08 08:49:51.27393467 -0400 EDT m=+0.032407237 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:51 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5430. Jul 08 08:49:51 managed-node1 podman[34612]: 2025-07-08 08:49:51.322707529 -0400 EDT m=+0.081180097 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:51 managed-node1 podman[34612]: 2025-07-08 08:49:51.324957676 -0400 EDT m=+0.083430360 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:51 managed-node1 auth_test_1_kube-auth_test_1_kube[34623]: This container is intended for podman CI testing Jul 08 08:49:51 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. 
Jul 08 08:49:51 managed-node1 conmon[34623]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events Jul 08 08:49:51 managed-node1 podman[34628]: 2025-07-08 08:49:51.357302871 -0400 EDT m=+0.019873605 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:51 managed-node1 podman[34628]: 2025-07-08 08:49:51.369677187 -0400 EDT m=+0.032247848 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:51 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5437. Jul 08 08:49:51 managed-node1 podman[34628]: 2025-07-08 08:49:51.412764957 -0400 EDT m=+0.075335673 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:51 managed-node1 podman[34628]: 2025-07-08 08:49:51.415079414 -0400 EDT m=+0.077650213 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:51 managed-node1 auth_test_1_kube-auth_test_1_kube[34639]: This container is intended for podman CI testing Jul 08 08:49:51 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:51 managed-node1 conmon[34639]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events Jul 08 08:49:51 managed-node1 podman[34644]: 2025-07-08 08:49:51.450177559 -0400 EDT m=+0.024940839 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:51 managed-node1 podman[34644]: 2025-07-08 08:49:51.46464424 -0400 EDT m=+0.039407266 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:51 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5444. 
Jul 08 08:49:51 managed-node1 podman[34644]: 2025-07-08 08:49:51.522426136 -0400 EDT m=+0.097189367 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:51 managed-node1 auth_test_1_kube-auth_test_1_kube[34700]: This container is intended for podman CI testing Jul 08 08:49:51 managed-node1 podman[34644]: 2025-07-08 08:49:51.526546755 -0400 EDT m=+0.101309701 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:51 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:51 managed-node1 podman[34726]: 2025-07-08 08:49:51.567774571 -0400 EDT m=+0.026283228 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:51 managed-node1 podman[34726]: 2025-07-08 08:49:51.582173666 -0400 EDT m=+0.040682285 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:51 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5451. 
Jul 08 08:49:51 managed-node1 podman[34726]: 2025-07-08 08:49:51.646536272 -0400 EDT m=+0.105045205 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:51 managed-node1 auth_test_1_kube-auth_test_1_kube[34782]: This container is intended for podman CI testing Jul 08 08:49:51 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:51 managed-node1 conmon[34782]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events Jul 08 08:49:51 managed-node1 podman[34726]: 2025-07-08 08:49:51.651445419 -0400 EDT m=+0.109954130 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:51 managed-node1 podman[34804]: 2025-07-08 08:49:51.700497825 -0400 EDT m=+0.032592195 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:51 managed-node1 podman[34804]: 2025-07-08 08:49:51.713819615 -0400 EDT m=+0.045913893 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:51 managed-node1 python3.12[34802]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:49:51 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5458. Jul 08 08:49:51 managed-node1 podman[34804]: 2025-07-08 08:49:51.824761802 -0400 EDT m=+0.156856211 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:51 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:51 managed-node1 auth_test_1_kube-auth_test_1_kube[34815]: This container is intended for podman CI testing Jul 08 08:49:51 managed-node1 conmon[34815]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events Jul 08 08:49:51 managed-node1 podman[34804]: 2025-07-08 08:49:51.831023939 -0400 EDT m=+0.163118200 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:51 managed-node1 podman[34821]: 2025-07-08 08:49:51.872133012 -0400 EDT m=+0.031590733 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:51 managed-node1 podman[34821]: 2025-07-08 08:49:51.885646271 -0400 EDT m=+0.045103780 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:51 
managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5465. Jul 08 08:49:51 managed-node1 podman[34821]: 2025-07-08 08:49:51.932911436 -0400 EDT m=+0.092369102 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:51 managed-node1 podman[34821]: 2025-07-08 08:49:51.937824433 -0400 EDT m=+0.097282013 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:51 managed-node1 auth_test_1_kube-auth_test_1_kube[34857]: This container is intended for podman CI testing Jul 08 08:49:51 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. 
Jul 08 08:49:51 managed-node1 conmon[34857]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events Jul 08 08:49:51 managed-node1 podman[34861]: 2025-07-08 08:49:51.970024669 -0400 EDT m=+0.020063434 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:51 managed-node1 podman[34861]: 2025-07-08 08:49:51.982530604 -0400 EDT m=+0.032569401 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:52 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5472. Jul 08 08:49:52 managed-node1 podman[34861]: 2025-07-08 08:49:52.029649096 -0400 EDT m=+0.079687898 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:52 managed-node1 podman[34861]: 2025-07-08 08:49:52.031897225 -0400 EDT m=+0.081936106 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:52 managed-node1 auth_test_1_kube-auth_test_1_kube[34873]: This container is intended for podman CI testing Jul 08 08:49:52 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:52 managed-node1 podman[34877]: 2025-07-08 08:49:52.064301272 -0400 EDT m=+0.019236559 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:52 managed-node1 podman[34877]: 2025-07-08 08:49:52.076969282 -0400 EDT m=+0.031904537 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:52 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5479. Jul 08 08:49:52 managed-node1 podman[34877]: 2025-07-08 08:49:52.125426809 -0400 EDT m=+0.080362136 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:52 managed-node1 podman[34877]: 2025-07-08 08:49:52.1278094 -0400 EDT m=+0.082744802 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:52 managed-node1 auth_test_1_kube-auth_test_1_kube[34888]: This container is intended for podman CI testing Jul 08 08:49:52 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:52 managed-node1 conmon[34888]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events Jul 08 08:49:52 managed-node1 podman[34892]: 2025-07-08 08:49:52.157661918 -0400 EDT m=+0.020301566 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:52 managed-node1 podman[34892]: 2025-07-08 08:49:52.170252412 -0400 EDT m=+0.032891993 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:52 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5486. 
Jul 08 08:49:52 managed-node1 podman[34892]: 2025-07-08 08:49:52.222263041 -0400 EDT m=+0.084902632 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:52 managed-node1 podman[34892]: 2025-07-08 08:49:52.224496917 -0400 EDT m=+0.087136533 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:52 managed-node1 auth_test_1_kube-auth_test_1_kube[34904]: This container is intended for podman CI testing Jul 08 08:49:52 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:52 managed-node1 podman[34908]: 2025-07-08 08:49:52.252564496 -0400 EDT m=+0.019618489 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:52 managed-node1 podman[34908]: 2025-07-08 08:49:52.265004193 -0400 EDT m=+0.032058104 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:52 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5493. 
Jul 08 08:49:52 managed-node1 podman[34908]: 2025-07-08 08:49:52.315701532 -0400 EDT m=+0.082755442 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:52 managed-node1 podman[34908]: 2025-07-08 08:49:52.31797177 -0400 EDT m=+0.085025729 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:52 managed-node1 auth_test_1_kube-auth_test_1_kube[34920]: This container is intended for podman CI testing Jul 08 08:49:52 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:52 managed-node1 podman[34924]: 2025-07-08 08:49:52.349729488 -0400 EDT m=+0.019737581 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:52 managed-node1 podman[34924]: 2025-07-08 08:49:52.362572499 -0400 EDT m=+0.032580554 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:52 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5500. 
Jul 08 08:49:52 managed-node1 podman[34924]: 2025-07-08 08:49:52.412063084 -0400 EDT m=+0.082071241 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:52 managed-node1 podman[34924]: 2025-07-08 08:49:52.414363606 -0400 EDT m=+0.084371723 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:52 managed-node1 auth_test_1_kube-auth_test_1_kube[34935]: This container is intended for podman CI testing Jul 08 08:49:52 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:52 managed-node1 podman[34939]: 2025-07-08 08:49:52.448029008 -0400 EDT m=+0.020726318 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:52 managed-node1 podman[34939]: 2025-07-08 08:49:52.460699146 -0400 EDT m=+0.033396467 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:52 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5507. 
Jul 08 08:49:52 managed-node1 podman[34939]: 2025-07-08 08:49:52.511935791 -0400 EDT m=+0.084633119 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:52 managed-node1 podman[34939]: 2025-07-08 08:49:52.514269499 -0400 EDT m=+0.086966874 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:52 managed-node1 auth_test_1_kube-auth_test_1_kube[34951]: This container is intended for podman CI testing Jul 08 08:49:52 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:52 managed-node1 podman[34955]: 2025-07-08 08:49:52.547364457 -0400 EDT m=+0.019949573 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:52 managed-node1 podman[34955]: 2025-07-08 08:49:52.559869428 -0400 EDT m=+0.032454533 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:52 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5514. 
Jul 08 08:49:52 managed-node1 podman[34955]: 2025-07-08 08:49:52.604272195 -0400 EDT m=+0.076857302 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:52 managed-node1 podman[34955]: 2025-07-08 08:49:52.607259927 -0400 EDT m=+0.079845062 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:52 managed-node1 auth_test_1_kube-auth_test_1_kube[34966]: This container is intended for podman CI testing Jul 08 08:49:52 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:52 managed-node1 conmon[34966]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events Jul 08 08:49:52 managed-node1 podman[34970]: 2025-07-08 08:49:52.636508232 -0400 EDT m=+0.019579286 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:52 managed-node1 podman[34970]: 2025-07-08 08:49:52.649916826 -0400 EDT m=+0.032987855 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:52 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5521. Jul 08 08:49:52 managed-node1 podman[34970]: 2025-07-08 08:49:52.694096176 -0400 EDT m=+0.077167300 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:52 managed-node1 podman[34970]: 2025-07-08 08:49:52.696375296 -0400 EDT m=+0.079446384 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:52 managed-node1 auth_test_1_kube-auth_test_1_kube[34982]: This container is intended for podman CI testing Jul 08 08:49:52 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:52 managed-node1 podman[34986]: 2025-07-08 08:49:52.727887217 -0400 EDT m=+0.020636003 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:52 managed-node1 podman[34986]: 2025-07-08 08:49:52.7406964 -0400 EDT m=+0.033445178 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:52 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5528. Jul 08 08:49:52 managed-node1 podman[34986]: 2025-07-08 08:49:52.785223061 -0400 EDT m=+0.077971841 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:52 managed-node1 podman[34986]: 2025-07-08 08:49:52.787501377 -0400 EDT m=+0.080250208 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:52 managed-node1 auth_test_1_kube-auth_test_1_kube[34998]: This container is intended for podman CI testing Jul 08 08:49:52 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:52 managed-node1 podman[35002]: 2025-07-08 08:49:52.816356887 -0400 EDT m=+0.020080054 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:52 managed-node1 podman[35002]: 2025-07-08 08:49:52.829281424 -0400 EDT m=+0.033004541 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:52 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5535. Jul 08 08:49:52 managed-node1 podman[35002]: 2025-07-08 08:49:52.881723432 -0400 EDT m=+0.085446556 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:52 managed-node1 podman[35002]: 2025-07-08 08:49:52.883942315 -0400 EDT m=+0.087665481 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:52 managed-node1 auth_test_1_kube-auth_test_1_kube[35013]: This container is intended for podman CI testing Jul 08 08:49:52 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:52 managed-node1 podman[35017]: 2025-07-08 08:49:52.915706329 -0400 EDT m=+0.019774268 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:52 managed-node1 podman[35017]: 2025-07-08 08:49:52.927707601 -0400 EDT m=+0.031775518 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:52 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5542. Jul 08 08:49:52 managed-node1 podman[35017]: 2025-07-08 08:49:52.970528 -0400 EDT m=+0.074596004 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:52 managed-node1 podman[35017]: 2025-07-08 08:49:52.973158654 -0400 EDT m=+0.077226615 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:52 managed-node1 auth_test_1_kube-auth_test_1_kube[35028]: This container is intended for podman CI testing Jul 08 08:49:52 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:53 managed-node1 podman[35032]: 2025-07-08 08:49:53.001433921 -0400 EDT m=+0.020038412 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:53 managed-node1 podman[35032]: 2025-07-08 08:49:53.013954542 -0400 EDT m=+0.032558988 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:53 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5549. Jul 08 08:49:53 managed-node1 podman[35032]: 2025-07-08 08:49:53.064806574 -0400 EDT m=+0.083411031 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:53 managed-node1 podman[35032]: 2025-07-08 08:49:53.067083007 -0400 EDT m=+0.085687494 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:53 managed-node1 auth_test_1_kube-auth_test_1_kube[35043]: This container is intended for podman CI testing Jul 08 08:49:53 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:53 managed-node1 podman[35047]: 2025-07-08 08:49:53.097740963 -0400 EDT m=+0.020759320 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:53 managed-node1 podman[35047]: 2025-07-08 08:49:53.11056486 -0400 EDT m=+0.033583200 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:53 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5556. Jul 08 08:49:53 managed-node1 podman[35047]: 2025-07-08 08:49:53.159763414 -0400 EDT m=+0.082781770 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:53 managed-node1 podman[35047]: 2025-07-08 08:49:53.162006083 -0400 EDT m=+0.085024488 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:53 managed-node1 auth_test_1_kube-auth_test_1_kube[35058]: This container is intended for podman CI testing Jul 08 08:49:53 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. 
Jul 08 08:49:53 managed-node1 conmon[35058]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events Jul 08 08:49:53 managed-node1 podman[35062]: 2025-07-08 08:49:53.193654904 -0400 EDT m=+0.019439840 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:53 managed-node1 podman[35062]: 2025-07-08 08:49:53.206217556 -0400 EDT m=+0.032002451 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:53 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5563. Jul 08 08:49:53 managed-node1 podman[35062]: 2025-07-08 08:49:53.26219104 -0400 EDT m=+0.087975941 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:53 managed-node1 podman[35062]: 2025-07-08 08:49:53.267010852 -0400 EDT m=+0.092795806 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:53 managed-node1 auth_test_1_kube-auth_test_1_kube[35096]: This container is intended for podman CI testing Jul 08 08:49:53 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:53 managed-node1 podman[35120]: 2025-07-08 08:49:53.307197324 -0400 EDT m=+0.025129594 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:53 managed-node1 podman[35120]: 2025-07-08 08:49:53.321875099 -0400 EDT m=+0.039807296 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:53 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5570. Jul 08 08:49:53 managed-node1 podman[35120]: 2025-07-08 08:49:53.373402715 -0400 EDT m=+0.091335406 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:53 managed-node1 podman[35120]: 2025-07-08 08:49:53.376403871 -0400 EDT m=+0.094335992 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:53 managed-node1 auth_test_1_kube-auth_test_1_kube[35167]: This container is intended for podman CI testing Jul 08 08:49:53 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:53 managed-node1 podman[35196]: 2025-07-08 08:49:53.423124804 -0400 EDT m=+0.031110290 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:53 managed-node1 podman[35196]: 2025-07-08 08:49:53.439764954 -0400 EDT m=+0.047750144 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:53 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5577. Jul 08 08:49:53 managed-node1 podman[35196]: 2025-07-08 08:49:53.534882508 -0400 EDT m=+0.142867772 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:53 managed-node1 auth_test_1_kube-auth_test_1_kube[35234]: This container is intended for podman CI testing Jul 08 08:49:53 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. 
Jul 08 08:49:53 managed-node1 podman[35196]: 2025-07-08 08:49:53.54100085 -0400 EDT m=+0.148985897 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:53 managed-node1 conmon[35234]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events Jul 08 08:49:53 managed-node1 podman[35238]: 2025-07-08 08:49:53.585159651 -0400 EDT m=+0.030526617 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:53 managed-node1 python3.12[35229]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:49:53 managed-node1 podman[35238]: 2025-07-08 08:49:53.600630294 -0400 EDT m=+0.045997182 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:53 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5584. 
Jul 08 08:49:53 managed-node1 podman[35238]: 2025-07-08 08:49:53.654569108 -0400 EDT m=+0.099936073 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:53 managed-node1 podman[35238]: 2025-07-08 08:49:53.658200253 -0400 EDT m=+0.103567101 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:53 managed-node1 auth_test_1_kube-auth_test_1_kube[35251]: This container is intended for podman CI testing Jul 08 08:49:53 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:53 managed-node1 podman[35274]: 2025-07-08 08:49:53.692985719 -0400 EDT m=+0.022754624 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:53 managed-node1 podman[35274]: 2025-07-08 08:49:53.705345161 -0400 EDT m=+0.035114032 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:53 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5591. 
Jul 08 08:49:53 managed-node1 podman[35274]: 2025-07-08 08:49:53.753180763 -0400 EDT m=+0.082949635 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:53 managed-node1 podman[35274]: 2025-07-08 08:49:53.755398526 -0400 EDT m=+0.085167433 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:53 managed-node1 auth_test_1_kube-auth_test_1_kube[35292]: This container is intended for podman CI testing Jul 08 08:49:53 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:53 managed-node1 podman[35296]: 2025-07-08 08:49:53.788253959 -0400 EDT m=+0.020142780 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:53 managed-node1 podman[35296]: 2025-07-08 08:49:53.80170614 -0400 EDT m=+0.033594946 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:53 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5598. 
Jul 08 08:49:53 managed-node1 podman[35296]: 2025-07-08 08:49:53.849098303 -0400 EDT m=+0.080987255 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:53 managed-node1 podman[35296]: 2025-07-08 08:49:53.851335171 -0400 EDT m=+0.083224015 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:53 managed-node1 auth_test_1_kube-auth_test_1_kube[35308]: This container is intended for podman CI testing Jul 08 08:49:53 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:53 managed-node1 podman[35312]: 2025-07-08 08:49:53.884555507 -0400 EDT m=+0.019570948 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:53 managed-node1 podman[35312]: 2025-07-08 08:49:53.896749294 -0400 EDT m=+0.031764716 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:53 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5605. 
Jul 08 08:49:53 managed-node1 podman[35312]: 2025-07-08 08:49:53.945718218 -0400 EDT m=+0.080733636 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:53 managed-node1 podman[35312]: 2025-07-08 08:49:53.949025596 -0400 EDT m=+0.084041052 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:53 managed-node1 auth_test_1_kube-auth_test_1_kube[35323]: This container is intended for podman CI testing Jul 08 08:49:53 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:53 managed-node1 podman[35327]: 2025-07-08 08:49:53.97821992 -0400 EDT m=+0.020059326 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:53 managed-node1 podman[35327]: 2025-07-08 08:49:53.99071089 -0400 EDT m=+0.032550283 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:54 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5612. 
Jul 08 08:49:54 managed-node1 podman[35327]: 2025-07-08 08:49:54.031033777 -0400 EDT m=+0.072873181 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:54 managed-node1 podman[35327]: 2025-07-08 08:49:54.033292818 -0400 EDT m=+0.075132290 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:54 managed-node1 auth_test_1_kube-auth_test_1_kube[35339]: This container is intended for podman CI testing Jul 08 08:49:54 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:54 managed-node1 podman[35343]: 2025-07-08 08:49:54.062431341 -0400 EDT m=+0.020319855 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:54 managed-node1 podman[35343]: 2025-07-08 08:49:54.074918588 -0400 EDT m=+0.032807096 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:54 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5619. 
Jul 08 08:49:54 managed-node1 podman[35343]: 2025-07-08 08:49:54.127795758 -0400 EDT m=+0.085684274 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:54 managed-node1 auth_test_1_kube-auth_test_1_kube[35355]: This container is intended for podman CI testing Jul 08 08:49:54 managed-node1 podman[35343]: 2025-07-08 08:49:54.130515559 -0400 EDT m=+0.088404096 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:54 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:54 managed-node1 podman[35359]: 2025-07-08 08:49:54.163020175 -0400 EDT m=+0.019485815 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:54 managed-node1 podman[35359]: 2025-07-08 08:49:54.175914914 -0400 EDT m=+0.032380598 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:54 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5626. 
Jul 08 08:49:54 managed-node1 podman[35359]: 2025-07-08 08:49:54.235781563 -0400 EDT m=+0.092247283 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:54 managed-node1 podman[35359]: 2025-07-08 08:49:54.240035881 -0400 EDT m=+0.096501608 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:54 managed-node1 auth_test_1_kube-auth_test_1_kube[35370]: This container is intended for podman CI testing Jul 08 08:49:54 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:54 managed-node1 conmon[35370]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events Jul 08 08:49:54 managed-node1 podman[35391]: 2025-07-08 08:49:54.285368099 -0400 EDT m=+0.028779315 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:54 managed-node1 podman[35391]: 2025-07-08 08:49:54.299881247 -0400 EDT m=+0.043292241 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:54 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5633. Jul 08 08:49:54 managed-node1 podman[35391]: 2025-07-08 08:49:54.354916875 -0400 EDT m=+0.098327991 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:54 managed-node1 podman[35391]: 2025-07-08 08:49:54.357432697 -0400 EDT m=+0.100843625 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:54 managed-node1 auth_test_1_kube-auth_test_1_kube[35448]: This container is intended for podman CI testing Jul 08 08:49:54 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:54 managed-node1 podman[35468]: 2025-07-08 08:49:54.396590414 -0400 EDT m=+0.030313114 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:54 managed-node1 podman[35468]: 2025-07-08 08:49:54.409494778 -0400 EDT m=+0.043216848 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:54 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5640. Jul 08 08:49:54 managed-node1 podman[35468]: 2025-07-08 08:49:54.469857567 -0400 EDT m=+0.103579579 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:54 managed-node1 auth_test_1_kube-auth_test_1_kube[35528]: This container is intended for podman CI testing Jul 08 08:49:54 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:54 managed-node1 podman[35468]: 2025-07-08 08:49:54.474841998 -0400 EDT m=+0.108564065 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:54 managed-node1 conmon[35528]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events Jul 08 08:49:54 managed-node1 podman[35535]: 2025-07-08 08:49:54.525306513 -0400 EDT m=+0.035156287 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:54 managed-node1 podman[35535]: 2025-07-08 08:49:54.539009865 -0400 EDT m=+0.048859754 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:54 
managed-node1 python3.12[35533]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:49:54 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5647. Jul 08 08:49:54 managed-node1 podman[35535]: 2025-07-08 08:49:54.639294004 -0400 EDT m=+0.149143874 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:54 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. 
Jul 08 08:49:54 managed-node1 auth_test_1_kube-auth_test_1_kube[35547]: This container is intended for podman CI testing Jul 08 08:49:54 managed-node1 conmon[35547]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events Jul 08 08:49:54 managed-node1 podman[35535]: 2025-07-08 08:49:54.645180192 -0400 EDT m=+0.155029937 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:54 managed-node1 podman[35552]: 2025-07-08 08:49:54.688064641 -0400 EDT m=+0.031133612 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:54 managed-node1 podman[35552]: 2025-07-08 08:49:54.702336193 -0400 EDT m=+0.045404917 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:54 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5654. 
Jul 08 08:49:54 managed-node1 podman[35552]: 2025-07-08 08:49:54.753639417 -0400 EDT m=+0.096708160 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:54 managed-node1 podman[35552]: 2025-07-08 08:49:54.755959748 -0400 EDT m=+0.099028531 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:54 managed-node1 auth_test_1_kube-auth_test_1_kube[35588]: This container is intended for podman CI testing Jul 08 08:49:54 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:54 managed-node1 podman[35592]: 2025-07-08 08:49:54.788654551 -0400 EDT m=+0.019611415 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:54 managed-node1 podman[35592]: 2025-07-08 08:49:54.801611928 -0400 EDT m=+0.032568690 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:54 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5661. 
Jul 08 08:49:54 managed-node1 podman[35592]: 2025-07-08 08:49:54.849621261 -0400 EDT m=+0.080578035 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:54 managed-node1 podman[35592]: 2025-07-08 08:49:54.851914638 -0400 EDT m=+0.082871431 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:54 managed-node1 auth_test_1_kube-auth_test_1_kube[35603]: This container is intended for podman CI testing Jul 08 08:49:54 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:54 managed-node1 podman[35607]: 2025-07-08 08:49:54.881117621 -0400 EDT m=+0.020200537 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:54 managed-node1 podman[35607]: 2025-07-08 08:49:54.893647143 -0400 EDT m=+0.032730046 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 08 08:49:54 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5668. 
Jul 08 08:49:54 managed-node1 podman[35607]: 2025-07-08 08:49:54.94675282 -0400 EDT m=+0.085835930 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:54 managed-node1 podman[35607]: 2025-07-08 08:49:54.950642796 -0400 EDT m=+0.089725652 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:54 managed-node1 auth_test_1_kube-auth_test_1_kube[35619]: This container is intended for podman CI testing Jul 08 08:49:54 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:54 managed-node1 conmon[35619]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events Jul 08 08:49:54 managed-node1 podman[35647]: 2025-07-08 08:49:54.993222726 -0400 EDT m=+0.028202975 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:55 managed-node1 podman[35647]: 2025-07-08 08:49:55.008067372 -0400 EDT m=+0.043047680 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:55 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. 
░░ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. ░░ ░░ The job identifier is 5675. Jul 08 08:49:55 managed-node1 podman[35647]: 2025-07-08 08:49:55.0606935 -0400 EDT m=+0.095673937 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:55 managed-node1 podman[35647]: 2025-07-08 08:49:55.063449151 -0400 EDT m=+0.098429455 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:55 managed-node1 auth_test_1_kube-auth_test_1_kube[35694]: This container is intended for podman CI testing Jul 08 08:49:55 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:55 managed-node1 podman[35716]: 2025-07-08 08:49:55.112920274 -0400 EDT m=+0.036372698 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:55 managed-node1 podman[35716]: 2025-07-08 08:49:55.126570749 -0400 EDT m=+0.050023173 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:55 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.
░░ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. ░░ ░░ The job identifier is 5682. Jul 08 08:49:55 managed-node1 podman[35716]: 2025-07-08 08:49:55.186796426 -0400 EDT m=+0.110248987 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:55 managed-node1 auth_test_1_kube-auth_test_1_kube[35779]: This container is intended for podman CI testing Jul 08 08:49:55 managed-node1 podman[35716]: 2025-07-08 08:49:55.19058706 -0400 EDT m=+0.114039398 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:55 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:55 managed-node1 podman[35783]: 2025-07-08 08:49:55.237215238 -0400 EDT m=+0.031105664 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:55 managed-node1 podman[35783]: 2025-07-08 08:49:55.25293569 -0400 EDT m=+0.046825984 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:55 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.
░░ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. ░░ ░░ The job identifier is 5689. Jul 08 08:49:55 managed-node1 podman[35783]: 2025-07-08 08:49:55.342917667 -0400 EDT m=+0.136808092 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:55 managed-node1 auth_test_1_kube-auth_test_1_kube[35794]: This container is intended for podman CI testing Jul 08 08:49:55 managed-node1 podman[35783]: 2025-07-08 08:49:55.349068439 -0400 EDT m=+0.142958773 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:55 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.
Jul 08 08:49:55 managed-node1 conmon[35794]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events Jul 08 08:49:55 managed-node1 podman[35799]: 2025-07-08 08:49:55.397578783 -0400 EDT m=+0.029895789 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:55 managed-node1 podman[35799]: 2025-07-08 08:49:55.411485494 -0400 EDT m=+0.043802478 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 08 08:49:55 managed-node1 python3.12[35778]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None Jul 08 08:49:55 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5696. 
Jul 08 08:49:55 managed-node1 podman[35799]: 2025-07-08 08:49:55.524711388 -0400 EDT m=+0.157028332 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:55 managed-node1 podman[35799]: 2025-07-08 08:49:55.530282716 -0400 EDT m=+0.162599610 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:55 managed-node1 auth_test_1_kube-auth_test_1_kube[35811]: This container is intended for podman CI testing Jul 08 08:49:55 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:55 managed-node1 systemd[1]: Reload requested from client PID 35818 ('systemctl') (unit session-5.scope)... Jul 08 08:49:55 managed-node1 systemd[1]: Reloading... Jul 08 08:49:55 managed-node1 podman[35817]: 2025-07-08 08:49:55.593084177 -0400 EDT m=+0.047755188 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:55 managed-node1 podman[35817]: 2025-07-08 08:49:55.609674353 -0400 EDT m=+0.064345340 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:55 managed-node1 systemd-rc-local-generator[35872]: /etc/rc.d/rc.local is not marked executable, skipping. Jul 08 08:49:55 managed-node1 systemd[1]: Reloading finished in 229 ms. Jul 08 08:49:55 managed-node1 systemd[1]: Stopping podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service - A template for running K8s workloads via podman-kube-play... 
â–‘â–‘ Subject: A stop job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service has begun execution â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A stop job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service has begun execution. â–‘â–‘ â–‘â–‘ The job identifier is 5703. Jul 08 08:49:55 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container. â–‘â–‘ Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5704. Jul 08 08:49:55 managed-node1 podman[35817]: 2025-07-08 08:49:55.870407173 -0400 EDT m=+0.325078107 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 08 08:49:55 managed-node1 podman[35817]: 2025-07-08 08:49:55.873119723 -0400 EDT m=+0.327790771 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:55 managed-node1 auth_test_1_kube-auth_test_1_kube[35885]: This container is intended for podman CI testing Jul 08 08:49:55 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state. Jul 08 08:49:55 managed-node1 podman[35886]: 2025-07-08 08:49:55.890939078 -0400 EDT m=+0.054229148 pod stop c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe (image=, name=auth_test_1_kube) Jul 08 08:49:55 managed-node1 podman[35886]: 2025-07-08 08:49:55.895856762 -0400 EDT m=+0.059146999 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:55 managed-node1 systemd[1]: var-lib-containers-storage-overlay-a3f2d24d69694007b4135875ca1c98dc5a11a388333a5edad571fa1e5b8ff0f8-merged.mount: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit var-lib-containers-storage-overlay-a3f2d24d69694007b4135875ca1c98dc5a11a388333a5edad571fa1e5b8ff0f8-merged.mount has successfully entered the 'dead' state. Jul 08 08:49:55 managed-node1 podman[35886]: 2025-07-08 08:49:55.93896244 -0400 EDT m=+0.102252402 container cleanup 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 08 08:49:55 managed-node1 systemd[1]: libpod-48ec4542d282038db01fc6c7ad1ebb663235fcef10f4d93e5b03692ced2a2c70.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-48ec4542d282038db01fc6c7ad1ebb663235fcef10f4d93e5b03692ced2a2c70.scope has successfully entered the 'dead' state. Jul 08 08:49:55 managed-node1 podman[35886]: 2025-07-08 08:49:55.958546219 -0400 EDT m=+0.121836545 container died 48ec4542d282038db01fc6c7ad1ebb663235fcef10f4d93e5b03692ced2a2c70 (image=, name=c71b45337dde-infra, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:55 managed-node1 systemd[1]: run-p20064-i20364.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit run-p20064-i20364.scope has successfully entered the 'dead' state. Jul 08 08:49:55 managed-node1 kernel: podman1: port 1(veth1) entered disabled state Jul 08 08:49:55 managed-node1 kernel: veth1 (unregistering): left allmulticast mode Jul 08 08:49:55 managed-node1 kernel: veth1 (unregistering): left promiscuous mode Jul 08 08:49:55 managed-node1 kernel: podman1: port 1(veth1) entered disabled state Jul 08 08:49:56 managed-node1 NetworkManager[721]: [1751978996.0048] device (podman1): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jul 08 08:49:56 managed-node1 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... â–‘â–‘ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit NetworkManager-dispatcher.service has begun execution. â–‘â–‘ â–‘â–‘ The job identifier is 5711. Jul 08 08:49:56 managed-node1 systemd[1]: run-netns-netns\x2d23bde762\x2d8f16\x2d7128\x2db248\x2d149bdcabb8bf.mount: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit run-netns-netns\x2d23bde762\x2d8f16\x2d7128\x2db248\x2d149bdcabb8bf.mount has successfully entered the 'dead' state. Jul 08 08:49:56 managed-node1 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. 
â–‘â–‘ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit NetworkManager-dispatcher.service has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5711. Jul 08 08:49:56 managed-node1 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-48ec4542d282038db01fc6c7ad1ebb663235fcef10f4d93e5b03692ced2a2c70-rootfs-merge.mount: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit var-lib-containers-storage-overlay\x2dcontainers-48ec4542d282038db01fc6c7ad1ebb663235fcef10f4d93e5b03692ced2a2c70-rootfs-merge.mount has successfully entered the 'dead' state. Jul 08 08:49:56 managed-node1 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-48ec4542d282038db01fc6c7ad1ebb663235fcef10f4d93e5b03692ced2a2c70-userdata-shm.mount: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit var-lib-containers-storage-overlay\x2dcontainers-48ec4542d282038db01fc6c7ad1ebb663235fcef10f4d93e5b03692ced2a2c70-userdata-shm.mount has successfully entered the 'dead' state. Jul 08 08:49:56 managed-node1 podman[35886]: 2025-07-08 08:49:56.064750431 -0400 EDT m=+0.228040415 container cleanup 48ec4542d282038db01fc6c7ad1ebb663235fcef10f4d93e5b03692ced2a2c70 (image=, name=c71b45337dde-infra, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:56 managed-node1 systemd[1]: Removed slice machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice - cgroup machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice. â–‘â–‘ Subject: A stop job for unit machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice has finished â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A stop job for unit machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice has finished. â–‘â–‘ â–‘â–‘ The job identifier is 5790 and the job result is done. Jul 08 08:49:56 managed-node1 systemd[1]: machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice: Consumed 1.811s CPU time, 1M memory peak. â–‘â–‘ Subject: Resources consumed by unit runtime â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice completed and consumed the indicated resources. 
Jul 08 08:49:56 managed-node1 podman[35886]: 2025-07-08 08:49:56.071432875 -0400 EDT m=+0.234722869 pod stop c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe (image=, name=auth_test_1_kube) Jul 08 08:49:56 managed-node1 systemd[1]: machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice: Failed to open /run/systemd/transient/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice: No such file or directory Jul 08 08:49:56 managed-node1 podman[35886]: 2025-07-08 08:49:56.098237058 -0400 EDT m=+0.261527049 container remove 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 08 08:49:56 managed-node1 podman[35886]: 2025-07-08 08:49:56.125616889 -0400 EDT m=+0.288906885 container remove 48ec4542d282038db01fc6c7ad1ebb663235fcef10f4d93e5b03692ced2a2c70 (image=, name=c71b45337dde-infra, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:56 managed-node1 systemd[1]: machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice: Failed to open /run/systemd/transient/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice: No such file or directory Jul 08 08:49:56 managed-node1 podman[35886]: 2025-07-08 08:49:56.134735427 -0400 EDT m=+0.298025390 pod remove c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe (image=, name=auth_test_1_kube) Jul 08 08:49:56 managed-node1 podman[35886]: 2025-07-08 08:49:56.138033275 -0400 EDT m=+0.301323520 container kill b4c59e52584f9100b63cd3e9f3678cfdc7322306f3c4eda8222b7e2f06624b58 (image=, name=3ef6fcac6278-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:56 managed-node1 systemd[1]: libpod-b4c59e52584f9100b63cd3e9f3678cfdc7322306f3c4eda8222b7e2f06624b58.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b4c59e52584f9100b63cd3e9f3678cfdc7322306f3c4eda8222b7e2f06624b58.scope has successfully entered the 'dead' state. Jul 08 08:49:56 managed-node1 conmon[20044]: conmon b4c59e52584f9100b63c : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/libpod-b4c59e52584f9100b63cd3e9f3678cfdc7322306f3c4eda8222b7e2f06624b58.scope/container/memory.events Jul 08 08:49:56 managed-node1 podman[35886]: 2025-07-08 08:49:56.145688213 -0400 EDT m=+0.308978342 container died b4c59e52584f9100b63cd3e9f3678cfdc7322306f3c4eda8222b7e2f06624b58 (image=, name=3ef6fcac6278-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:56 managed-node1 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-b4c59e52584f9100b63cd3e9f3678cfdc7322306f3c4eda8222b7e2f06624b58-rootfs-merge.mount: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit var-lib-containers-storage-overlay\x2dcontainers-b4c59e52584f9100b63cd3e9f3678cfdc7322306f3c4eda8222b7e2f06624b58-rootfs-merge.mount has successfully entered the 'dead' state. Jul 08 08:49:56 managed-node1 podman[35886]: 2025-07-08 08:49:56.204852934 -0400 EDT m=+0.368142936 container remove b4c59e52584f9100b63cd3e9f3678cfdc7322306f3c4eda8222b7e2f06624b58 (image=, name=3ef6fcac6278-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 08 08:49:56 managed-node1 podman[35886]: Pods stopped: Jul 08 08:49:56 managed-node1 podman[35886]: c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe Jul 08 08:49:56 managed-node1 podman[35886]: Pods removed: Jul 08 08:49:56 managed-node1 podman[35886]: c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe Jul 08 08:49:56 managed-node1 podman[35886]: Secrets removed: Jul 08 08:49:56 managed-node1 podman[35886]: Volumes removed: Jul 08 08:49:56 managed-node1 systemd[1]: podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service has successfully entered the 'dead' state. Jul 08 08:49:56 managed-node1 systemd[1]: Stopped podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service - A template for running K8s workloads via podman-kube-play. â–‘â–‘ Subject: A stop job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service has finished â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A stop job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service has finished. â–‘â–‘ â–‘â–‘ The job identifier is 5703 and the job result is done. Jul 08 08:49:56 managed-node1 systemd[1]: podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service: Consumed 33.643s CPU time, 37.2M memory peak. â–‘â–‘ Subject: Resources consumed by unit runtime â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service completed and consumed the indicated resources. Jul 08 08:49:56 managed-node1 python3.12[36105]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:49:56 managed-node1 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-b4c59e52584f9100b63cd3e9f3678cfdc7322306f3c4eda8222b7e2f06624b58-userdata-shm.mount: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit var-lib-containers-storage-overlay\x2dcontainers-b4c59e52584f9100b63cd3e9f3678cfdc7322306f3c4eda8222b7e2f06624b58-userdata-shm.mount has successfully entered the 'dead' state. 
Jul 08 08:49:57 managed-node1 python3.12[36262]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 08 08:49:57 managed-node1 python3.12[36262]: ansible-containers.podman.podman_play version: 5.5.1, kube file /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml Jul 08 08:49:57 managed-node1 python3.12[36430]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:49:58 managed-node1 python3.12[36585]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:50:00 managed-node1 python3.12[36742]: ansible-systemd Invoked with name=auth_test_1_quadlet.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jul 08 08:50:00 managed-node1 systemd[1]: Reload requested from client PID 36745 ('systemctl') (unit session-5.scope)... Jul 08 08:50:00 managed-node1 systemd[1]: Reloading... Jul 08 08:50:00 managed-node1 systemd-rc-local-generator[36793]: /etc/rc.d/rc.local is not marked executable, skipping. Jul 08 08:50:00 managed-node1 systemd[1]: Reloading finished in 203 ms. Jul 08 08:50:01 managed-node1 python3.12[36957]: ansible-stat Invoked with path=/etc/containers/systemd/auth_test_1_quadlet.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:50:01 managed-node1 python3.12[37270]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:50:04 managed-node1 python3.12[37582]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:50:05 managed-node1 python3.12[37743]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:50:06 managed-node1 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. 
Jul 08 08:50:07 managed-node1 python3.12[37901]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:50:09 managed-node1 python3.12[38058]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:50:10 managed-node1 python3.12[38215]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:50:11 managed-node1 python3.12[38372]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:50:12 managed-node1 python3.12[38528]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None Jul 08 08:50:12 managed-node1 python3.12[38685]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:50:13 managed-node1 python3.12[38840]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:50:14 managed-node1 python3.12[38995]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:50:15 managed-node1 python3.12[39152]: ansible-systemd Invoked with name=auth_test_1_quadlet.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jul 08 08:50:15 managed-node1 systemd[1]: Reload requested from client PID 39155 ('systemctl') (unit session-5.scope)... Jul 08 08:50:15 managed-node1 systemd[1]: Reloading... Jul 08 08:50:15 managed-node1 systemd-rc-local-generator[39190]: /etc/rc.d/rc.local is not marked executable, skipping. Jul 08 08:50:15 managed-node1 systemd[1]: Reloading finished in 216 ms. 
Jul 08 08:50:16 managed-node1 python3.12[39364]: ansible-stat Invoked with path=/etc/containers/systemd/auth_test_1_quadlet.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:50:17 managed-node1 python3.12[39676]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:50:19 managed-node1 python3.12[39987]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:50:20 managed-node1 python3.12[40148]: ansible-getent Invoked with database=passwd key=auth_test_user1 fail_key=False service=None split=None Jul 08 08:50:20 managed-node1 python3.12[40304]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:50:21 managed-node1 python3.12[40460]: ansible-user Invoked with name=auth_test_user1 state=absent non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node1 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None Jul 08 08:50:21 managed-node1 python3.12[40616]: ansible-file Invoked with path=/home/auth_test_user1 state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:50:22 managed-node1 python3.12[40771]: ansible-ansible.legacy.command Invoked with _raw_params=podman inspect podman_registry --format '{{range .}}{{range .Mounts}}{{if eq .Type "volume"}}{{.Name}}{{end}}{{end}}{{end}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:50:22 managed-node1 python3.12[40935]: ansible-ansible.legacy.command Invoked with _raw_params=podman rm -f podman_registry _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:50:22 managed-node1 systemd[1]: libpod-f2c95e55ef894afd89ddd278d5b3c1eaf43e3665402627d4291a7a7f163e81bc.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-f2c95e55ef894afd89ddd278d5b3c1eaf43e3665402627d4291a7a7f163e81bc.scope has successfully entered the 'dead' state. 
Jul 08 08:50:22 managed-node1 podman[40936]: 2025-07-08 08:50:22.67414362 -0400 EDT m=+0.041656966 container died f2c95e55ef894afd89ddd278d5b3c1eaf43e3665402627d4291a7a7f163e81bc (image=quay.io/libpod/registry:2.8.2, name=podman_registry) Jul 08 08:50:22 managed-node1 kernel: podman0: port 1(veth0) entered disabled state Jul 08 08:50:22 managed-node1 kernel: veth0 (unregistering): left allmulticast mode Jul 08 08:50:22 managed-node1 kernel: veth0 (unregistering): left promiscuous mode Jul 08 08:50:22 managed-node1 kernel: podman0: port 1(veth0) entered disabled state Jul 08 08:50:22 managed-node1 NetworkManager[721]: [1751979022.7196] device (podman0): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jul 08 08:50:22 managed-node1 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... â–‘â–‘ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit NetworkManager-dispatcher.service has begun execution. â–‘â–‘ â–‘â–‘ The job identifier is 5791. Jul 08 08:50:22 managed-node1 systemd[1]: run-netns-netns\x2d470568dd\x2dcb1f\x2d0545\x2d4ce3\x2d303f19bb0757.mount: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit run-netns-netns\x2d470568dd\x2dcb1f\x2d0545\x2d4ce3\x2d303f19bb0757.mount has successfully entered the 'dead' state. Jul 08 08:50:22 managed-node1 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. â–‘â–‘ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit NetworkManager-dispatcher.service has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 5791. Jul 08 08:50:22 managed-node1 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-f2c95e55ef894afd89ddd278d5b3c1eaf43e3665402627d4291a7a7f163e81bc-userdata-shm.mount: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit var-lib-containers-storage-overlay\x2dcontainers-f2c95e55ef894afd89ddd278d5b3c1eaf43e3665402627d4291a7a7f163e81bc-userdata-shm.mount has successfully entered the 'dead' state. Jul 08 08:50:22 managed-node1 systemd[1]: var-lib-containers-storage-overlay-1edf2a61e7717e54fe2166194ec642baa4ae0ab463595f28a4f079d84846a39e-merged.mount: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit var-lib-containers-storage-overlay-1edf2a61e7717e54fe2166194ec642baa4ae0ab463595f28a4f079d84846a39e-merged.mount has successfully entered the 'dead' state. Jul 08 08:50:22 managed-node1 podman[40936]: 2025-07-08 08:50:22.814620775 -0400 EDT m=+0.182133970 container remove f2c95e55ef894afd89ddd278d5b3c1eaf43e3665402627d4291a7a7f163e81bc (image=quay.io/libpod/registry:2.8.2, name=podman_registry) Jul 08 08:50:22 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
Jul 08 08:50:22 managed-node1 systemd[1]: libpod-conmon-f2c95e55ef894afd89ddd278d5b3c1eaf43e3665402627d4291a7a7f163e81bc.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-conmon-f2c95e55ef894afd89ddd278d5b3c1eaf43e3665402627d4291a7a7f163e81bc.scope has successfully entered the 'dead' state. Jul 08 08:50:23 managed-node1 python3.12[41128]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume rm adb0a365ad5e043225f1b238691d0f909d8df3bc7ab8077f6ef439c4ae00bfe7 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:50:23 managed-node1 podman[41129]: 2025-07-08 08:50:23.305299712 -0400 EDT m=+0.024461642 volume remove adb0a365ad5e043225f1b238691d0f909d8df3bc7ab8077f6ef439c4ae00bfe7 Jul 08 08:50:23 managed-node1 python3.12[41290]: ansible-file Invoked with path=/tmp/lsr_g_pgovnt_podman state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:50:25 managed-node1 python3.12[41496]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jul 08 08:50:26 managed-node1 python3.12[41680]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:50:27 managed-node1 python3.12[41835]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:50:29 managed-node1 python3.12[42145]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:50:30 managed-node1 python3.12[42306]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 08 08:50:30 managed-node1 python3.12[42462]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:50:32 managed-node1 python3.12[42619]: ansible-tempfile Invoked with state=directory prefix=lsr_podman_config_ suffix= path=None Jul 08 08:50:32 managed-node1 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. 
Jul 08 08:50:32 managed-node1 python3.12[42774]: ansible-ansible.legacy.command Invoked with _raw_params=tar --ignore-failed-read -c -P -v -p -f /tmp/lsr_podman_config_xpiczilj/backup.tar /etc/containers/containers.conf.d/50-systemroles.conf /etc/containers/registries.conf.d/50-systemroles.conf /etc/containers/storage.conf /etc/containers/policy.json _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:50:33 managed-node1 python3.12[42931]: ansible-user Invoked with name=user1 state=present non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node1 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None Jul 08 08:50:33 managed-node1 useradd[42933]: new group: name=user1, GID=1000 Jul 08 08:50:33 managed-node1 useradd[42933]: new user: name=user1, UID=1000, GID=1000, home=/home/user1, shell=/bin/bash, from=/dev/pts/0 Jul 08 08:50:35 managed-node1 python3.12[43243]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:50:36 managed-node1 python3.12[43404]: ansible-getent Invoked with database=passwd key=user1 fail_key=False service=None split=None Jul 08 08:50:36 managed-node1 python3.12[43560]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:50:36 managed-node1 python3.12[43717]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:50:37 managed-node1 python3.12[43873]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:50:38 managed-node1 python3.12[44029]: ansible-file Invoked with path=/home/user1/.config/containers/containers.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:50:38 managed-node1 python3.12[44184]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 08 08:50:38 managed-node1 python3.12[44309]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf owner=user1 group=user1 
mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751979038.242837-13178-157508935633128/.source.conf _original_basename=.3t631c0n follow=False checksum=b1776092f2908d76e11fd6af87267469b2c17d5a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:50:39 managed-node1 python3.12[44464]: ansible-file Invoked with path=/home/user1/.config/containers/registries.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:50:39 managed-node1 python3.12[44619]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 08 08:50:40 managed-node1 python3.12[44744]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf owner=user1 group=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751979039.5151405-13207-260225766438174/.source.conf _original_basename=.8hb8eqwc follow=False checksum=fde25488ce7040f1639af7bfc88ed125318cc0b0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:50:40 managed-node1 python3.12[44899]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:50:40 managed-node1 python3.12[45054]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 08 08:50:41 managed-node1 python3.12[45179]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/storage.conf owner=user1 group=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751979040.6993687-13236-206291270506142/.source.conf _original_basename=.24a9rkzz follow=False checksum=38f015f4780579bd388dd955b42916199fd7fe19 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:50:41 managed-node1 python3.12[45334]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:50:42 managed-node1 python3.12[45489]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False 
get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:50:42 managed-node1 python3.12[45644]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 08 08:50:42 managed-node1 python3.12[45769]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/policy.json owner=user1 group=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751979042.2485042-13279-227893683407134/.source.json _original_basename=._jtk04da follow=False checksum=6746c079ad563b735fc39f73d4876654b80b0a0d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:50:43 managed-node1 python3.12[45924]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:50:43 managed-node1 python3.12[46081]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:50:44 managed-node1 python3.12[46237]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:50:45 managed-node1 python3.12[46393]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:50:46 managed-node1 python3.12[46828]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:50:47 managed-node1 python3.12[46985]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:50:47 managed-node1 python3.12[47141]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:50:48 managed-node1 python3.12[47300]: ansible-stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:50:49 managed-node1 python3.12[47457]: ansible-stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:50:49 managed-node1 python3.12[47614]: ansible-stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:50:49 
managed-node1 python3.12[47771]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:50:50 managed-node1 python3.12[47928]: ansible-ansible.legacy.command Invoked with _raw_params=grep 'container_name_as_hostname[ ]*=[ ]*true' /home/user1/.config/containers/containers.conf.d/50-systemroles.conf _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:50:52 managed-node1 python3.12[48239]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:50:53 managed-node1 python3.12[48401]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:50:53 managed-node1 python3.12[48558]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:50:54 managed-node1 python3.12[48714]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:50:54 managed-node1 python3.12[48870]: ansible-file Invoked with path=/home/user1/.config/containers/containers.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:50:55 managed-node1 python3.12[49025]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 08 08:50:55 managed-node1 python3.12[49103]: ansible-ansible.legacy.file Invoked with owner=user1 group=user1 mode=0644 dest=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf _original_basename=.0gnlhomz recurse=False state=file path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:50:56 managed-node1 python3.12[49258]: ansible-file Invoked with path=/home/user1/.config/containers/registries.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:50:56 managed-node1 python3.12[49413]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf 
follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 08 08:50:56 managed-node1 python3.12[49491]: ansible-ansible.legacy.file Invoked with owner=user1 group=user1 mode=0644 dest=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf _original_basename=.accmlokk recurse=False state=file path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:50:57 managed-node1 python3.12[49646]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:50:57 managed-node1 python3.12[49801]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 08 08:50:57 managed-node1 python3.12[49879]: ansible-ansible.legacy.file Invoked with owner=user1 group=user1 mode=0644 dest=/home/user1/.config/containers/storage.conf _original_basename=.pvcmfj7i recurse=False state=file path=/home/user1/.config/containers/storage.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:50:58 managed-node1 python3.12[50034]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:50:58 managed-node1 python3.12[50189]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:50:59 managed-node1 python3.12[50346]: ansible-slurp Invoked with path=/home/user1/.config/containers/policy.json src=/home/user1/.config/containers/policy.json Jul 08 08:51:00 managed-node1 python3.12[50501]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:51:00 managed-node1 python3.12[50658]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:51:00 managed-node1 python3.12[50814]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:51:01 managed-node1 python3.12[50970]: ansible-file Invoked with 
path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:51:03 managed-node1 python3.12[51358]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:51:03 managed-node1 python3.12[51515]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:51:03 managed-node1 python3.12[51671]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:51:04 managed-node1 python3.12[51827]: ansible-stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:51:05 managed-node1 python3.12[51984]: ansible-stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:51:05 managed-node1 python3.12[52141]: ansible-stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:51:05 managed-node1 python3.12[52298]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:51:08 managed-node1 python3.12[52610]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:51:08 managed-node1 python3.12[52771]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 08 08:51:09 managed-node1 python3.12[52927]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:51:10 managed-node1 python3.12[53084]: ansible-file Invoked with path=/etc/containers/containers.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:51:10 managed-node1 python3.12[53239]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 08 08:51:11 managed-node1 python3.12[53364]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/containers.conf.d/50-systemroles.conf owner=root mode=0644 
src=/root/.ansible/tmp/ansible-tmp-1751979070.540365-14172-121943904920472/.source.conf _original_basename=.q56vcedp follow=False checksum=b1776092f2908d76e11fd6af87267469b2c17d5a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:51:11 managed-node1 python3.12[53519]: ansible-file Invoked with path=/etc/containers/registries.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:51:12 managed-node1 python3.12[53674]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 08 08:51:12 managed-node1 python3.12[53799]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/registries.conf.d/50-systemroles.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751979071.810814-14217-46013201585375/.source.conf _original_basename=.p9it418x follow=False checksum=fde25488ce7040f1639af7bfc88ed125318cc0b0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:51:12 managed-node1 python3.12[53954]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:51:13 managed-node1 python3.12[54109]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 08 08:51:13 managed-node1 python3.12[54234]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/storage.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751979073.1248894-14268-85562215010734/.source.conf _original_basename=.scvdoudg follow=False checksum=38f015f4780579bd388dd955b42916199fd7fe19 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:51:14 managed-node1 python3.12[54389]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:51:14 managed-node1 python3.12[54544]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:51:15 managed-node1 python3.12[54701]: ansible-slurp 
Invoked with path=/etc/containers/policy.json src=/etc/containers/policy.json Jul 08 08:51:15 managed-node1 python3.12[54856]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 08 08:51:15 managed-node1 python3.12[54983]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/policy.json owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751979075.3179543-14355-115139051150641/.source.json _original_basename=.bjdfcfs5 follow=False checksum=6746c079ad563b735fc39f73d4876654b80b0a0d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:51:16 managed-node1 python3.12[55138]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:51:17 managed-node1 python3.12[55295]: ansible-file Invoked with path=/root/.config/containers state=directory owner=root group=0 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:51:19 managed-node1 python3.12[55732]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:51:20 managed-node1 python3.12[55889]: ansible-stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:51:20 managed-node1 python3.12[56046]: ansible-stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:51:21 managed-node1 python3.12[56203]: ansible-stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:51:21 managed-node1 python3.12[56360]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:51:23 managed-node1 python3.12[56674]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:51:24 managed-node1 python3.12[56836]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:51:25 managed-node1 python3.12[56993]: ansible-file Invoked with path=/etc/containers/containers.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:51:26 managed-node1 python3.12[57148]: ansible-ansible.legacy.stat Invoked with 
path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 08 08:51:26 managed-node1 python3.12[57226]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/containers.conf.d/50-systemroles.conf _original_basename=.qxvg1nyv recurse=False state=file path=/etc/containers/containers.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:51:26 managed-node1 python3.12[57381]: ansible-file Invoked with path=/etc/containers/registries.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:51:27 managed-node1 python3.12[57536]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 08 08:51:27 managed-node1 python3.12[57614]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/registries.conf.d/50-systemroles.conf _original_basename=.rx7m24ak recurse=False state=file path=/etc/containers/registries.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:51:28 managed-node1 python3.12[57769]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:51:28 managed-node1 python3.12[57924]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 08 08:51:29 managed-node1 python3.12[58002]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/storage.conf _original_basename=.bdv1ih5b recurse=False state=file path=/etc/containers/storage.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:51:29 managed-node1 python3.12[58157]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:51:29 managed-node1 
python3.12[58312]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:51:30 managed-node1 python3.12[58469]: ansible-slurp Invoked with path=/etc/containers/policy.json src=/etc/containers/policy.json Jul 08 08:51:31 managed-node1 python3.12[58624]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:51:32 managed-node1 python3.12[58781]: ansible-file Invoked with path=/root/.config/containers state=directory owner=root group=0 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:51:34 managed-node1 python3.12[59169]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:51:35 managed-node1 python3.12[59326]: ansible-stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:51:35 managed-node1 python3.12[59483]: ansible-stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:51:35 managed-node1 python3.12[59640]: ansible-stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:51:36 managed-node1 python3.12[59797]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:51:36 managed-node1 python3.12[59954]: ansible-slurp Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf src=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf Jul 08 08:51:37 managed-node1 python3.12[60109]: ansible-slurp Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf src=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf Jul 08 08:51:37 managed-node1 python3.12[60264]: ansible-slurp Invoked with path=/home/user1/.config/containers/storage.conf src=/home/user1/.config/containers/storage.conf Jul 08 08:51:38 managed-node1 python3.12[60419]: ansible-slurp Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf src=/etc/containers/containers.conf.d/50-systemroles.conf Jul 08 08:51:38 managed-node1 python3.12[60574]: ansible-slurp Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf src=/etc/containers/registries.conf.d/50-systemroles.conf Jul 08 08:51:39 managed-node1 python3.12[60729]: ansible-slurp Invoked with path=/etc/containers/storage.conf src=/etc/containers/storage.conf Jul 08 08:51:41 managed-node1 python3.12[61039]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:51:42 managed-node1 python3.12[61200]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True 
get_attributes=True checksum_algorithm=sha1 Jul 08 08:51:43 managed-node1 python3.12[61357]: ansible-file Invoked with path=/etc/containers/containers.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:51:43 managed-node1 python3.12[61512]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 08 08:51:44 managed-node1 python3.12[61639]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/containers.conf.d/50-systemroles.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751979103.6532533-15389-229593234271028/.source.conf _original_basename=.nwfb8ndj follow=False checksum=9694c1d1c700a6435eecf4066b052584f4ee94c0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:51:44 managed-node1 python3.12[61794]: ansible-file Invoked with path=/etc/containers/registries.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:51:45 managed-node1 python3.12[61949]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 08 08:51:45 managed-node1 python3.12[62027]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/registries.conf.d/50-systemroles.conf _original_basename=.fsfm1l81 recurse=False state=file path=/etc/containers/registries.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:51:46 managed-node1 python3.12[62182]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:51:46 managed-node1 python3.12[62337]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 08 08:51:46 managed-node1 python3.12[62415]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/storage.conf _original_basename=.rj1uoysr recurse=False state=file path=/etc/containers/storage.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S 
unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:51:47 managed-node1 python3.12[62570]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:51:47 managed-node1 python3.12[62725]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:51:48 managed-node1 python3.12[62882]: ansible-slurp Invoked with path=/etc/containers/policy.json src=/etc/containers/policy.json Jul 08 08:51:48 managed-node1 python3.12[63037]: ansible-slurp Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf src=/etc/containers/containers.conf.d/50-systemroles.conf Jul 08 08:51:49 managed-node1 python3.12[63192]: ansible-file Invoked with state=absent path=/etc/containers/containers.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:51:49 managed-node1 python3.12[63347]: ansible-file Invoked with state=absent path=/etc/containers/registries.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:51:50 managed-node1 python3.12[63502]: ansible-file Invoked with state=absent path=/etc/containers/storage.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:51:50 managed-node1 python3.12[63657]: ansible-file Invoked with state=absent path=/etc/containers/policy.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:51:50 managed-node1 python3.12[63812]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:51:51 managed-node1 python3.12[63967]: ansible-file Invoked with state=absent 
path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:51:51 managed-node1 python3.12[64122]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/storage.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:51:51 managed-node1 python3.12[64277]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/policy.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:51:52 managed-node1 python3.12[64432]: ansible-file Invoked with state=absent path=/root/.config/containers/auth.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:51:52 managed-node1 python3.12[64587]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/auth.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:51:53 managed-node1 python3.12[64742]: ansible-ansible.legacy.command Invoked with _raw_params=tar xfvpP /tmp/lsr_podman_config_xpiczilj/backup.tar _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:51:53 managed-node1 python3.12[64898]: ansible-file Invoked with state=absent path=/tmp/lsr_podman_config_xpiczilj recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:51:55 managed-node1 python3.12[65104]: ansible-setup Invoked with gather_subset=['!all', '!min', 'distribution', 'distribution_major_version', 'distribution_version', 'os_family'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jul 08 08:51:56 managed-node1 python3.12[65261]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:51:56 managed-node1 systemd[4462]: Created slice background.slice - User Background Tasks Slice. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 14. Jul 08 08:51:56 managed-node1 systemd[4462]: Starting systemd-tmpfiles-clean.service - Cleanup of User's Temporary Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 13. Jul 08 08:51:56 managed-node1 systemd[4462]: Finished systemd-tmpfiles-clean.service - Cleanup of User's Temporary Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. Jul 08 08:51:56 managed-node1 python3.12[65418]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:51:58 managed-node1 python3.12[65728]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:52:00 managed-node1 python3.12[65889]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 08 08:52:00 managed-node1 python3.12[66045]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:52:03 managed-node1 python3.12[66253]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jul 08 08:52:06 managed-node1 python3.12[66437]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:52:06 managed-node1 python3.12[66592]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:52:08 managed-node1 python3.12[66902]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:52:09 managed-node1 python3.12[67063]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 08 08:52:10 managed-node1 python3.12[67219]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:52:13 managed-node1 python3.12[67427]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jul 08 08:52:14 managed-node1 python3.12[67611]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:52:15 managed-node1 python3.12[67766]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True
checksum_algorithm=sha1 Jul 08 08:52:17 managed-node1 python3.12[68076]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:52:18 managed-node1 python3.12[68237]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 08 08:52:18 managed-node1 python3.12[68393]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:52:20 managed-node1 python3.12[68550]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:52:22 managed-node1 python3.12[68707]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:52:22 managed-node1 python3.12[68862]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/nopull.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 08 08:52:23 managed-node1 python3.12[68987]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751979142.423505-17133-38145694146989/.source.container dest=/etc/containers/systemd/nopull.container owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=670d64fc68a9768edb20cad26df2acc703542d85 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:52:25 managed-node1 python3.12[69297]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:52:26 managed-node1 python3.12[69458]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:52:27 managed-node1 python3.12[69615]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:52:29 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 08 08:52:29 managed-node1 podman[69781]: 2025-07-08 08:52:29.239882519 -0400 EDT m=+0.022057902 image pull-error this_is_a_bogus_image:latest short-name resolution enforced but cannot prompt without a TTY Jul 08 08:52:29 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 08 08:52:29 managed-node1 python3.12[69942]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:52:30 managed-node1 python3.12[70097]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/bogus.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 08 08:52:30 managed-node1 python3.12[70222]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751979149.8133175-17320-179092701161425/.source.container dest=/etc/containers/systemd/bogus.container owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=1d087e679d135214e8ac9ccaf33b2222916efb7f backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jul 08 08:52:33 managed-node1 python3.12[70532]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:52:34 managed-node1 python3.12[70693]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:52:36 managed-node1 python3.12[70851]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:52:37 managed-node1 python3.12[71008]: ansible-systemd Invoked with name=nopull.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jul 08 08:52:37 managed-node1 python3.12[71164]: ansible-stat Invoked with path=/etc/containers/systemd/nopull.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 08 08:52:38 managed-node1 python3.12[71476]: ansible-ansible.legacy.command Invoked with _raw_params=set -x set -o pipefail exec 1>&2 #podman volume rm --all #podman network prune -f podman volume ls podman network ls podman secret ls podman container ls podman pod ls podman images systemctl list-units | grep quadlet _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:52:38 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 08 08:52:38 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 08 08:52:39 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 08 08:52:40 managed-node1 python3.12[71831]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 08 08:52:41 managed-node1 python3.12[71994]: ansible-getent Invoked with database=passwd key=user_quadlet_basic fail_key=False service=None split=None Jul 08 08:52:42 managed-node1 python3.12[72150]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None PLAY RECAP ********************************************************************* managed-node1 : ok=110 changed=2 unreachable=0 failed=2 skipped=190 rescued=2 ignored=0 SYSTEM ROLES ERRORS BEGIN v1 [ { "ansible_version": "2.17.12", "end_time": "2025-07-08T12:52:38.458205+00:00Z", "host": "managed-node1", "message": "template error while templating string: Could not load \"podman_from_ini\": 'podman_from_ini'. String: {{ __podman_quadlet_raw.content | b64decode | podman_from_ini }}. Could not load \"podman_from_ini\": 'podman_from_ini'", "start_time": "2025-07-08T12:52:38.440119+00:00Z", "task_name": "Parse quadlet file", "task_path": "/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12" }, { "ansible_version": "2.17.12", "delta": "0:00:00.169423", "end_time": "2025-07-08 08:52:38.951320", "host": "managed-node1", "message": "non-zero return code", "rc": 1, "start_time": "2025-07-08 08:52:38.781897", "stderr": "+ set -o pipefail\n+ exec\n+ podman volume ls\nDRIVER VOLUME NAME\n+ podman network ls\nNETWORK ID NAME DRIVER\n2f259bab93aa podman bridge\n90e7467522a5 podman-default-kube-network bridge\n+ podman secret ls\nID NAME DRIVER CREATED UPDATED\n+ podman container ls\nCONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n+ podman pod ls\nPOD ID NAME STATUS CREATED INFRA ID # OF CONTAINERS\n+ podman images\nREPOSITORY TAG IMAGE ID CREATED SIZE\nquay.io/libpod/registry 2.8.2 0030ba3d620c 23 months ago 24.6 MB\nlocalhost:5000/libpod/testimage 20210610 9f9ec7f2fdef 4 years ago 7.99 MB\nquay.io/libpod/testimage 20210610 9f9ec7f2fdef 4 years ago 7.99 MB\n+ systemctl list-units\n+ grep quadlet", "task_name": "Debug3", "task_path": "/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:270" }, { "ansible_version": "2.17.12", "end_time": "2025-07-08T12:52:41.722461+00:00Z", "host": "managed-node1", "message": "The given podman user [user_quadlet_basic] does not exist - cannot continue\n", "start_time": "2025-07-08T12:52:41.701726+00:00Z", "task_name": "Fail if user does not exist", "task_path": "/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9" }, { "ansible_version": "2.17.12", "delta":
"0:00:00.037839", "end_time": "2025-07-08 08:52:42.082861", "host": "managed-node1", "message": "", "rc": 0, "start_time": "2025-07-08 08:52:42.045022", "stdout": "Jul 08 08:49:45 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5038.\nJul 08 08:49:45 managed-node1 podman[33270]: 2025-07-08 08:49:45.593321911 -0400 EDT m=+0.085104679 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:45 managed-node1 podman[33270]: 2025-07-08 08:49:45.595600043 -0400 EDT m=+0.087382830 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:45 managed-node1 auth_test_1_kube-auth_test_1_kube[33282]: This container is intended for podman CI testing\nJul 08 08:49:45 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:45 managed-node1 podman[33286]: 2025-07-08 08:49:45.636439329 -0400 EDT m=+0.028271353 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:45 managed-node1 podman[33286]: 2025-07-08 08:49:45.649856804 -0400 EDT m=+0.041688544 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, 
created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:45 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5045.\nJul 08 08:49:45 managed-node1 podman[33286]: 2025-07-08 08:49:45.708861153 -0400 EDT m=+0.100693111 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:45 managed-node1 podman[33286]: 2025-07-08 08:49:45.712686381 -0400 EDT m=+0.104518163 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:45 managed-node1 auth_test_1_kube-auth_test_1_kube[33350]: This container is intended for podman CI testing\nJul 08 08:49:45 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:45 managed-node1 podman[33380]: 2025-07-08 08:49:45.751009794 -0400 EDT m=+0.023249277 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:45 managed-node1 podman[33380]: 2025-07-08 08:49:45.76715798 -0400 EDT m=+0.039397338 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:45 managed-node1 
systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5052.\nJul 08 08:49:45 managed-node1 podman[33380]: 2025-07-08 08:49:45.82422548 -0400 EDT m=+0.096464922 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:45 managed-node1 auth_test_1_kube-auth_test_1_kube[33439]: This container is intended for podman CI testing\nJul 08 08:49:45 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:45 managed-node1 conmon[33439]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events\nJul 08 08:49:45 managed-node1 podman[33380]: 2025-07-08 08:49:45.829510409 -0400 EDT m=+0.101749551 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:45 managed-node1 podman[33448]: 2025-07-08 08:49:45.877828923 -0400 EDT m=+0.034075082 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:45 managed-node1 podman[33448]: 2025-07-08 08:49:45.892890013 -0400 EDT m=+0.049136106 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, 
created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:45 managed-node1 python3.12[33446]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:49:45 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5059.\nJul 08 08:49:45 managed-node1 podman[33448]: 2025-07-08 08:49:45.999158821 -0400 EDT m=+0.155405154 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:46 managed-node1 auth_test_1_kube-auth_test_1_kube[33460]: This container is intended for podman CI testing\nJul 08 08:49:46 managed-node1 podman[33448]: 2025-07-08 08:49:46.005068376 -0400 EDT m=+0.161314418 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:46 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:46 managed-node1 podman[33470]: 2025-07-08 08:49:46.051486323 -0400 EDT m=+0.031554578 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:46 managed-node1 podman[33470]: 2025-07-08 08:49:46.066075246 -0400 EDT m=+0.046143244 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 
(image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:46 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5066.\nJul 08 08:49:46 managed-node1 podman[33470]: 2025-07-08 08:49:46.117152823 -0400 EDT m=+0.097221140 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:46 managed-node1 podman[33470]: 2025-07-08 08:49:46.1198846 -0400 EDT m=+0.099952518 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:46 managed-node1 auth_test_1_kube-auth_test_1_kube[33507]: This container is intended for podman CI testing\nJul 08 08:49:46 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:46 managed-node1 podman[33511]: 2025-07-08 08:49:46.15055551 -0400 EDT m=+0.020976997 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:46 managed-node1 podman[33511]: 2025-07-08 08:49:46.163279094 -0400 EDT m=+0.033700461 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, 
pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:46 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5073.\nJul 08 08:49:46 managed-node1 podman[33511]: 2025-07-08 08:49:46.213217596 -0400 EDT m=+0.083638969 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:46 managed-node1 podman[33511]: 2025-07-08 08:49:46.215501868 -0400 EDT m=+0.085923273 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:46 managed-node1 auth_test_1_kube-auth_test_1_kube[33522]: This container is intended for podman CI testing\nJul 08 08:49:46 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:46 managed-node1 podman[33526]: 2025-07-08 08:49:46.244185514 -0400 EDT m=+0.019318491 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:46 managed-node1 podman[33526]: 2025-07-08 08:49:46.256897767 -0400 EDT m=+0.032030708 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, 
PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:46 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5080.\nJul 08 08:49:46 managed-node1 podman[33526]: 2025-07-08 08:49:46.308998409 -0400 EDT m=+0.084131346 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:46 managed-node1 podman[33526]: 2025-07-08 08:49:46.311242091 -0400 EDT m=+0.086375084 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:46 managed-node1 auth_test_1_kube-auth_test_1_kube[33538]: This container is intended for podman CI testing\nJul 08 08:49:46 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:46 managed-node1 conmon[33538]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events\nJul 08 08:49:46 managed-node1 podman[33542]: 2025-07-08 08:49:46.341750805 -0400 EDT m=+0.021827804 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:46 managed-node1 podman[33542]: 2025-07-08 08:49:46.354633092 -0400 EDT m=+0.034710071 container restart 
1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:46 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5087.\nJul 08 08:49:46 managed-node1 podman[33542]: 2025-07-08 08:49:46.400711535 -0400 EDT m=+0.080788511 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:46 managed-node1 podman[33542]: 2025-07-08 08:49:46.40328957 -0400 EDT m=+0.083366573 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:46 managed-node1 auth_test_1_kube-auth_test_1_kube[33553]: This container is intended for podman CI testing\nJul 08 08:49:46 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:46 managed-node1 podman[33557]: 2025-07-08 08:49:46.43593079 -0400 EDT m=+0.019914898 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:46 managed-node1 podman[33557]: 2025-07-08 08:49:46.44840681 -0400 EDT m=+0.032390875 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 
(image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:46 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5094.\nJul 08 08:49:46 managed-node1 podman[33557]: 2025-07-08 08:49:46.501216122 -0400 EDT m=+0.085200201 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:46 managed-node1 podman[33557]: 2025-07-08 08:49:46.504296699 -0400 EDT m=+0.088280830 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:46 managed-node1 auth_test_1_kube-auth_test_1_kube[33568]: This container is intended for podman CI testing\nJul 08 08:49:46 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:46 managed-node1 podman[33572]: 2025-07-08 08:49:46.533910603 -0400 EDT m=+0.019233571 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:46 managed-node1 podman[33572]: 2025-07-08 08:49:46.546704422 -0400 EDT m=+0.032027359 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, 
pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:46 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5101.\nJul 08 08:49:46 managed-node1 podman[33572]: 2025-07-08 08:49:46.594679115 -0400 EDT m=+0.080002047 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:46 managed-node1 podman[33572]: 2025-07-08 08:49:46.597331818 -0400 EDT m=+0.082654790 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:46 managed-node1 auth_test_1_kube-auth_test_1_kube[33583]: This container is intended for podman CI testing\nJul 08 08:49:46 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:46 managed-node1 podman[33587]: 2025-07-08 08:49:46.629552133 -0400 EDT m=+0.020339325 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:46 managed-node1 podman[33587]: 2025-07-08 08:49:46.641895583 -0400 EDT m=+0.032682716 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, 
io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:46 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5108.\nJul 08 08:49:46 managed-node1 podman[33587]: 2025-07-08 08:49:46.690758245 -0400 EDT m=+0.081545387 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:46 managed-node1 podman[33587]: 2025-07-08 08:49:46.692988727 -0400 EDT m=+0.083775886 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:46 managed-node1 auth_test_1_kube-auth_test_1_kube[33599]: This container is intended for podman CI testing\nJul 08 08:49:46 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:46 managed-node1 conmon[33599]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events\nJul 08 08:49:46 managed-node1 podman[33603]: 2025-07-08 08:49:46.726894452 -0400 EDT m=+0.020112478 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:46 managed-node1 podman[33603]: 2025-07-08 08:49:46.739575229 -0400 EDT m=+0.032793244 container restart 
1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:46 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5115.\nJul 08 08:49:46 managed-node1 podman[33603]: 2025-07-08 08:49:46.794088093 -0400 EDT m=+0.087306240 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:46 managed-node1 auth_test_1_kube-auth_test_1_kube[33614]: This container is intended for podman CI testing\nJul 08 08:49:46 managed-node1 podman[33603]: 2025-07-08 08:49:46.79814541 -0400 EDT m=+0.091363541 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:46 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:46 managed-node1 podman[33640]: 2025-07-08 08:49:46.842178721 -0400 EDT m=+0.028105369 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:46 managed-node1 podman[33640]: 2025-07-08 08:49:46.856958194 -0400 EDT m=+0.042884619 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 
(image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:46 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5122.\nJul 08 08:49:46 managed-node1 podman[33640]: 2025-07-08 08:49:46.907632511 -0400 EDT m=+0.093559004 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:46 managed-node1 podman[33640]: 2025-07-08 08:49:46.909868606 -0400 EDT m=+0.095795091 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:46 managed-node1 auth_test_1_kube-auth_test_1_kube[33695]: This container is intended for podman CI testing\nJul 08 08:49:46 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:46 managed-node1 conmon[33695]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events\nJul 08 08:49:46 managed-node1 podman[33716]: 2025-07-08 08:49:46.956343788 -0400 EDT m=+0.028484493 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, 
created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:46 managed-node1 podman[33716]: 2025-07-08 08:49:46.971819147 -0400 EDT m=+0.043959658 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:47 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5129.\nJul 08 08:49:47 managed-node1 podman[33716]: 2025-07-08 08:49:47.044987412 -0400 EDT m=+0.117128101 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:47 managed-node1 auth_test_1_kube-auth_test_1_kube[33774]: This container is intended for podman CI testing\nJul 08 08:49:47 managed-node1 podman[33716]: 2025-07-08 08:49:47.0492363 -0400 EDT m=+0.121376849 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:47 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:47 managed-node1 podman[33778]: 2025-07-08 08:49:47.098897718 -0400 EDT m=+0.032891859 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:47 managed-node1 
podman[33778]: 2025-07-08 08:49:47.113649419 -0400 EDT m=+0.047643361 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:47 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5136.\nJul 08 08:49:47 managed-node1 podman[33778]: 2025-07-08 08:49:47.172175405 -0400 EDT m=+0.106169573 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:47 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:47 managed-node1 podman[33778]: 2025-07-08 08:49:47.177509501 -0400 EDT m=+0.111503424 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:47 managed-node1 conmon[33790]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events\nJul 08 08:49:47 managed-node1 auth_test_1_kube-auth_test_1_kube[33790]: This container is intended for podman CI testing\nJul 08 08:49:47 managed-node1 python3.12[33772]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:49:47 managed-node1 podman[33794]: 2025-07-08 08:49:47.223151006 -0400 EDT m=+0.032410745 container died 
1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:47 managed-node1 podman[33794]: 2025-07-08 08:49:47.237208679 -0400 EDT m=+0.046468524 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:47 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5143.\nJul 08 08:49:47 managed-node1 podman[33794]: 2025-07-08 08:49:47.296149541 -0400 EDT m=+0.105409245 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:47 managed-node1 podman[33794]: 2025-07-08 08:49:47.298412557 -0400 EDT m=+0.107672266 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:47 managed-node1 auth_test_1_kube-auth_test_1_kube[33832]: This container is intended for podman CI testing\nJul 08 08:49:47 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:47 managed-node1 podman[33836]: 2025-07-08 08:49:47.331266377 -0400 EDT m=+0.020000340 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 
(image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:47 managed-node1 podman[33836]: 2025-07-08 08:49:47.34470389 -0400 EDT m=+0.033437826 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:47 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5150.\nJul 08 08:49:47 managed-node1 podman[33836]: 2025-07-08 08:49:47.404422265 -0400 EDT m=+0.093156201 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:47 managed-node1 podman[33836]: 2025-07-08 08:49:47.406943453 -0400 EDT m=+0.095677421 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:47 managed-node1 auth_test_1_kube-auth_test_1_kube[33847]: This container is intended for podman CI testing\nJul 08 08:49:47 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:47 managed-node1 podman[33851]: 2025-07-08 08:49:47.436908797 -0400 EDT m=+0.021001065 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, 
PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:47 managed-node1 podman[33851]: 2025-07-08 08:49:47.449365459 -0400 EDT m=+0.033457632 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:47 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5157.\nJul 08 08:49:47 managed-node1 podman[33851]: 2025-07-08 08:49:47.499453503 -0400 EDT m=+0.083545709 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:47 managed-node1 podman[33851]: 2025-07-08 08:49:47.5018498 -0400 EDT m=+0.085942306 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:47 managed-node1 auth_test_1_kube-auth_test_1_kube[33863]: This container is intended for podman CI testing\nJul 08 08:49:47 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:47 managed-node1 podman[33867]: 2025-07-08 08:49:47.534736138 -0400 EDT m=+0.020486325 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, 
PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:47 managed-node1 podman[33867]: 2025-07-08 08:49:47.547230152 -0400 EDT m=+0.032980251 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:47 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5164.\nJul 08 08:49:47 managed-node1 podman[33867]: 2025-07-08 08:49:47.597133016 -0400 EDT m=+0.082883207 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:47 managed-node1 podman[33867]: 2025-07-08 08:49:47.599409134 -0400 EDT m=+0.085159269 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:47 managed-node1 auth_test_1_kube-auth_test_1_kube[33878]: This container is intended for podman CI testing\nJul 08 08:49:47 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:47 managed-node1 podman[33882]: 2025-07-08 08:49:47.634721629 -0400 EDT m=+0.022523862 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, 
io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:47 managed-node1 podman[33882]: 2025-07-08 08:49:47.646999059 -0400 EDT m=+0.034801249 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:47 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5171.\nJul 08 08:49:47 managed-node1 podman[33882]: 2025-07-08 08:49:47.695847177 -0400 EDT m=+0.083649362 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:47 managed-node1 podman[33882]: 2025-07-08 08:49:47.698223389 -0400 EDT m=+0.086025604 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:47 managed-node1 auth_test_1_kube-auth_test_1_kube[33894]: This container is intended for podman CI testing\nJul 08 08:49:47 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:47 managed-node1 conmon[33894]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events\nJul 08 08:49:47 managed-node1 podman[33898]: 2025-07-08 08:49:47.729215233 -0400 EDT m=+0.019668273 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, 
name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:47 managed-node1 podman[33898]: 2025-07-08 08:49:47.741919226 -0400 EDT m=+0.032372173 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:47 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5178.\nJul 08 08:49:47 managed-node1 podman[33898]: 2025-07-08 08:49:47.79163482 -0400 EDT m=+0.082087793 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:47 managed-node1 podman[33898]: 2025-07-08 08:49:47.793910139 -0400 EDT m=+0.084363124 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:47 managed-node1 auth_test_1_kube-auth_test_1_kube[33909]: This container is intended for podman CI testing\nJul 08 08:49:47 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:47 managed-node1 podman[33913]: 2025-07-08 08:49:47.824136159 -0400 EDT m=+0.020193567 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, 
PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:47 managed-node1 podman[33913]: 2025-07-08 08:49:47.837239421 -0400 EDT m=+0.033296646 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:47 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5185.\nJul 08 08:49:47 managed-node1 podman[33913]: 2025-07-08 08:49:47.883698497 -0400 EDT m=+0.079755730 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:47 managed-node1 podman[33913]: 2025-07-08 08:49:47.885994806 -0400 EDT m=+0.082052053 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:47 managed-node1 auth_test_1_kube-auth_test_1_kube[33925]: This container is intended for podman CI testing\nJul 08 08:49:47 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:47 managed-node1 podman[33929]: 2025-07-08 08:49:47.920128655 -0400 EDT m=+0.020019483 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, 
io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:47 managed-node1 podman[33929]: 2025-07-08 08:49:47.932768661 -0400 EDT m=+0.032659356 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:47 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5192.\nJul 08 08:49:47 managed-node1 podman[33929]: 2025-07-08 08:49:47.97598732 -0400 EDT m=+0.075878022 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:47 managed-node1 podman[33929]: 2025-07-08 08:49:47.978595153 -0400 EDT m=+0.078485910 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:47 managed-node1 auth_test_1_kube-auth_test_1_kube[33940]: This container is intended for podman CI testing\nJul 08 08:49:47 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:48 managed-node1 podman[33944]: 2025-07-08 08:49:48.011452469 -0400 EDT m=+0.020001856 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, 
created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:48 managed-node1 podman[33944]: 2025-07-08 08:49:48.024031825 -0400 EDT m=+0.032581173 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:48 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5199.\nJul 08 08:49:48 managed-node1 podman[33944]: 2025-07-08 08:49:48.077377205 -0400 EDT m=+0.085926587 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:48 managed-node1 podman[33944]: 2025-07-08 08:49:48.080239495 -0400 EDT m=+0.088788882 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:48 managed-node1 auth_test_1_kube-auth_test_1_kube[33955]: This container is intended for podman CI testing\nJul 08 08:49:48 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:48 managed-node1 podman[33959]: 2025-07-08 08:49:48.114038805 -0400 EDT m=+0.020085840 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:48 managed-node1 
podman[33959]: 2025-07-08 08:49:48.126637728 -0400 EDT m=+0.032684732 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:48 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5206.\nJul 08 08:49:48 managed-node1 podman[33959]: 2025-07-08 08:49:48.175852831 -0400 EDT m=+0.081899828 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:48 managed-node1 auth_test_1_kube-auth_test_1_kube[33970]: This container is intended for podman CI testing\nJul 08 08:49:48 managed-node1 podman[33959]: 2025-07-08 08:49:48.17838063 -0400 EDT m=+0.084427648 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:48 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:48 managed-node1 podman[33974]: 2025-07-08 08:49:48.209443354 -0400 EDT m=+0.019149410 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:48 managed-node1 podman[33974]: 2025-07-08 08:49:48.221899945 -0400 EDT m=+0.031605950 container restart 
1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:48 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5213.\nJul 08 08:49:48 managed-node1 podman[33974]: 2025-07-08 08:49:48.26397088 -0400 EDT m=+0.073676878 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:48 managed-node1 podman[33974]: 2025-07-08 08:49:48.266185294 -0400 EDT m=+0.075891305 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:48 managed-node1 auth_test_1_kube-auth_test_1_kube[33987]: This container is intended for podman CI testing\nJul 08 08:49:48 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:48 managed-node1 conmon[33987]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events\nJul 08 08:49:48 managed-node1 podman[33991]: 2025-07-08 08:49:48.2997921 -0400 EDT m=+0.020653478 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, 
app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:48 managed-node1 podman[33991]: 2025-07-08 08:49:48.312964173 -0400 EDT m=+0.033825456 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:48 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5220.\nJul 08 08:49:48 managed-node1 podman[33991]: 2025-07-08 08:49:48.366740991 -0400 EDT m=+0.087602299 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:48 managed-node1 auth_test_1_kube-auth_test_1_kube[34002]: This container is intended for podman CI testing\nJul 08 08:49:48 managed-node1 podman[33991]: 2025-07-08 08:49:48.369396108 -0400 EDT m=+0.090257515 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:48 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:48 managed-node1 podman[34006]: 2025-07-08 08:49:48.401963754 -0400 EDT m=+0.019984602 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, 
PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:48 managed-node1 podman[34006]: 2025-07-08 08:49:48.414636071 -0400 EDT m=+0.032656853 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:48 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5227.\nJul 08 08:49:48 managed-node1 podman[34006]: 2025-07-08 08:49:48.463279998 -0400 EDT m=+0.081300798 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:48 managed-node1 auth_test_1_kube-auth_test_1_kube[34017]: This container is intended for podman CI testing\nJul 08 08:49:48 managed-node1 podman[34006]: 2025-07-08 08:49:48.465810417 -0400 EDT m=+0.083831250 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:48 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:48 managed-node1 podman[34021]: 2025-07-08 08:49:48.497279996 -0400 EDT m=+0.018834447 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, 
created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:48 managed-node1 podman[34021]: 2025-07-08 08:49:48.510078876 -0400 EDT m=+0.031633279 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:48 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5234.\nJul 08 08:49:48 managed-node1 podman[34021]: 2025-07-08 08:49:48.562758804 -0400 EDT m=+0.084313210 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:48 managed-node1 podman[34021]: 2025-07-08 08:49:48.565079639 -0400 EDT m=+0.086634073 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:48 managed-node1 auth_test_1_kube-auth_test_1_kube[34032]: This container is intended for podman CI testing\nJul 08 08:49:48 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:48 managed-node1 podman[34036]: 2025-07-08 08:49:48.597345408 -0400 EDT m=+0.019006558 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:48 managed-node1 podman[34036]: 2025-07-08 
08:49:48.609878343 -0400 EDT m=+0.031539338 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:48 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5241.\nJul 08 08:49:48 managed-node1 podman[34036]: 2025-07-08 08:49:48.651001519 -0400 EDT m=+0.072662517 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:48 managed-node1 podman[34036]: 2025-07-08 08:49:48.653626332 -0400 EDT m=+0.075287365 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:48 managed-node1 auth_test_1_kube-auth_test_1_kube[34047]: This container is intended for podman CI testing\nJul 08 08:49:48 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:48 managed-node1 podman[34051]: 2025-07-08 08:49:48.686199905 -0400 EDT m=+0.020077534 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:48 managed-node1 podman[34051]: 2025-07-08 08:49:48.698857155 -0400 EDT m=+0.032734721 container restart 
1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:48 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5248.\nJul 08 08:49:48 managed-node1 podman[34051]: 2025-07-08 08:49:48.75022344 -0400 EDT m=+0.084101010 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:48 managed-node1 podman[34051]: 2025-07-08 08:49:48.752541475 -0400 EDT m=+0.086419071 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:48 managed-node1 auth_test_1_kube-auth_test_1_kube[34063]: This container is intended for podman CI testing\nJul 08 08:49:48 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:48 managed-node1 podman[34067]: 2025-07-08 08:49:48.782506253 -0400 EDT m=+0.020585418 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:48 managed-node1 podman[34067]: 2025-07-08 08:49:48.794763903 -0400 EDT m=+0.032843049 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 
(image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:48 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5255.\nJul 08 08:49:48 managed-node1 podman[34067]: 2025-07-08 08:49:48.839529999 -0400 EDT m=+0.077609197 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:48 managed-node1 podman[34067]: 2025-07-08 08:49:48.841788624 -0400 EDT m=+0.079867778 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:48 managed-node1 auth_test_1_kube-auth_test_1_kube[34079]: This container is intended for podman CI testing\nJul 08 08:49:48 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:48 managed-node1 podman[34083]: 2025-07-08 08:49:48.873954077 -0400 EDT m=+0.019815458 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:48 managed-node1 podman[34083]: 2025-07-08 08:49:48.886952201 -0400 EDT m=+0.032813564 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, 
pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:48 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5262.\nJul 08 08:49:48 managed-node1 podman[34083]: 2025-07-08 08:49:48.939018792 -0400 EDT m=+0.084880219 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:48 managed-node1 podman[34083]: 2025-07-08 08:49:48.941284488 -0400 EDT m=+0.087145886 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:48 managed-node1 auth_test_1_kube-auth_test_1_kube[34094]: This container is intended for podman CI testing\nJul 08 08:49:48 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:48 managed-node1 podman[34098]: 2025-07-08 08:49:48.970114144 -0400 EDT m=+0.019376077 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:48 managed-node1 podman[34098]: 2025-07-08 08:49:48.982705486 -0400 EDT m=+0.031967451 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, 
created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:49 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5269.\nJul 08 08:49:49 managed-node1 podman[34098]: 2025-07-08 08:49:49.034115159 -0400 EDT m=+0.083377181 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:49 managed-node1 podman[34098]: 2025-07-08 08:49:49.036373194 -0400 EDT m=+0.085635165 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:49 managed-node1 auth_test_1_kube-auth_test_1_kube[34110]: This container is intended for podman CI testing\nJul 08 08:49:49 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:49 managed-node1 podman[34114]: 2025-07-08 08:49:49.068177986 -0400 EDT m=+0.018833378 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:49 managed-node1 podman[34114]: 2025-07-08 08:49:49.080844182 -0400 EDT m=+0.031499518 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, 
io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:49 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5276.\nJul 08 08:49:49 managed-node1 podman[34114]: 2025-07-08 08:49:49.132890916 -0400 EDT m=+0.083546268 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:49 managed-node1 podman[34114]: 2025-07-08 08:49:49.135916047 -0400 EDT m=+0.086571507 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:49 managed-node1 auth_test_1_kube-auth_test_1_kube[34125]: This container is intended for podman CI testing\nJul 08 08:49:49 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:49 managed-node1 podman[34130]: 2025-07-08 08:49:49.169663613 -0400 EDT m=+0.023219464 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:49 managed-node1 podman[34130]: 2025-07-08 08:49:49.182757517 -0400 EDT m=+0.036313167 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, 
PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:49 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5283.\nJul 08 08:49:49 managed-node1 podman[34130]: 2025-07-08 08:49:49.226407952 -0400 EDT m=+0.079963608 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:49 managed-node1 auth_test_1_kube-auth_test_1_kube[34141]: This container is intended for podman CI testing\nJul 08 08:49:49 managed-node1 podman[34130]: 2025-07-08 08:49:49.229312932 -0400 EDT m=+0.082868620 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:49 managed-node1 conmon[34141]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events\nJul 08 08:49:49 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:49 managed-node1 podman[34145]: 2025-07-08 08:49:49.259028664 -0400 EDT m=+0.020826764 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:49 managed-node1 podman[34145]: 2025-07-08 08:49:49.271800703 -0400 EDT m=+0.033598804 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, 
name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:49 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5290.\nJul 08 08:49:49 managed-node1 podman[34145]: 2025-07-08 08:49:49.320079906 -0400 EDT m=+0.081878046 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:49 managed-node1 podman[34145]: 2025-07-08 08:49:49.322360297 -0400 EDT m=+0.084158397 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:49 managed-node1 auth_test_1_kube-auth_test_1_kube[34158]: This container is intended for podman CI testing\nJul 08 08:49:49 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:49 managed-node1 conmon[34158]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events\nJul 08 08:49:49 managed-node1 podman[34162]: 2025-07-08 08:49:49.354181891 -0400 EDT m=+0.019486538 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 
08:49:49 managed-node1 podman[34162]: 2025-07-08 08:49:49.366999551 -0400 EDT m=+0.032304118 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:49 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5297.\nJul 08 08:49:49 managed-node1 podman[34162]: 2025-07-08 08:49:49.412204529 -0400 EDT m=+0.077509139 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:49 managed-node1 podman[34162]: 2025-07-08 08:49:49.414519645 -0400 EDT m=+0.079824263 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:49 managed-node1 auth_test_1_kube-auth_test_1_kube[34174]: This container is intended for podman CI testing\nJul 08 08:49:49 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:49 managed-node1 podman[34178]: 2025-07-08 08:49:49.448006888 -0400 EDT m=+0.020382843 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:49 managed-node1 podman[34178]: 2025-07-08 08:49:49.460572271 -0400 EDT m=+0.032948211 
container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:49 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5304.\nJul 08 08:49:49 managed-node1 podman[34178]: 2025-07-08 08:49:49.504582378 -0400 EDT m=+0.076958491 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:49 managed-node1 podman[34178]: 2025-07-08 08:49:49.508391375 -0400 EDT m=+0.080767425 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:49 managed-node1 auth_test_1_kube-auth_test_1_kube[34190]: This container is intended for podman CI testing\nJul 08 08:49:49 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:49 managed-node1 podman[34202]: 2025-07-08 08:49:49.5517187 -0400 EDT m=+0.025513816 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:49 managed-node1 podman[34202]: 2025-07-08 08:49:49.565640744 -0400 EDT m=+0.039435731 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 
(image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:49 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5311.\nJul 08 08:49:49 managed-node1 podman[34202]: 2025-07-08 08:49:49.620244581 -0400 EDT m=+0.094039639 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:49 managed-node1 podman[34202]: 2025-07-08 08:49:49.622807718 -0400 EDT m=+0.096602697 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:49 managed-node1 auth_test_1_kube-auth_test_1_kube[34263]: This container is intended for podman CI testing\nJul 08 08:49:49 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:49 managed-node1 podman[34288]: 2025-07-08 08:49:49.672425537 -0400 EDT m=+0.036848435 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:49 managed-node1 podman[34288]: 2025-07-08 08:49:49.68658677 -0400 EDT m=+0.051009922 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, 
pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:49 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5318.\nJul 08 08:49:49 managed-node1 podman[34288]: 2025-07-08 08:49:49.756107113 -0400 EDT m=+0.120530428 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:49 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:49 managed-node1 auth_test_1_kube-auth_test_1_kube[34351]: This container is intended for podman CI testing\nJul 08 08:49:49 managed-node1 conmon[34351]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events\nJul 08 08:49:49 managed-node1 podman[34288]: 2025-07-08 08:49:49.762543671 -0400 EDT m=+0.126966670 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:49 managed-node1 podman[34355]: 2025-07-08 08:49:49.81243902 -0400 EDT m=+0.035935072 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:49 managed-node1 podman[34355]: 
2025-07-08 08:49:49.827940145 -0400 EDT m=+0.051436097 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:49 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5325.\nJul 08 08:49:49 managed-node1 python3.12[34349]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:49:49 managed-node1 podman[34355]: 2025-07-08 08:49:49.90705641 -0400 EDT m=+0.130552597 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:49 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:49 managed-node1 podman[34355]: 2025-07-08 08:49:49.912203202 -0400 EDT m=+0.135699177 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:49 managed-node1 auth_test_1_kube-auth_test_1_kube[34367]: This container is intended for podman CI testing\nJul 08 08:49:49 managed-node1 conmon[34367]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events\nJul 08 08:49:49 managed-node1 podman[34373]: 2025-07-08 08:49:49.952833053 -0400 EDT m=+0.025054188 container died 
1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:49 managed-node1 podman[34373]: 2025-07-08 08:49:49.966408755 -0400 EDT m=+0.038629636 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:50 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5332.\nJul 08 08:49:50 managed-node1 podman[34373]: 2025-07-08 08:49:50.01387811 -0400 EDT m=+0.086099048 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:50 managed-node1 podman[34373]: 2025-07-08 08:49:50.016837306 -0400 EDT m=+0.089058241 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:50 managed-node1 auth_test_1_kube-auth_test_1_kube[34409]: This container is intended for podman CI testing\nJul 08 08:49:50 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:50 managed-node1 conmon[34409]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: 
/sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events\nJul 08 08:49:50 managed-node1 podman[34413]: 2025-07-08 08:49:50.044603388 -0400 EDT m=+0.018237094 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:50 managed-node1 podman[34413]: 2025-07-08 08:49:50.057419606 -0400 EDT m=+0.031053305 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:50 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5339.\nJul 08 08:49:50 managed-node1 podman[34413]: 2025-07-08 08:49:50.106411777 -0400 EDT m=+0.080045470 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:50 managed-node1 podman[34413]: 2025-07-08 08:49:50.109023029 -0400 EDT m=+0.082656864 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:50 managed-node1 auth_test_1_kube-auth_test_1_kube[34425]: This container is intended for podman CI testing\nJul 08 08:49:50 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit 
libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:50 managed-node1 podman[34429]: 2025-07-08 08:49:50.14027844 -0400 EDT m=+0.021282662 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:50 managed-node1 podman[34429]: 2025-07-08 08:49:50.152958137 -0400 EDT m=+0.033962260 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:50 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5346.\nJul 08 08:49:50 managed-node1 podman[34429]: 2025-07-08 08:49:50.198946969 -0400 EDT m=+0.079951112 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:50 managed-node1 podman[34429]: 2025-07-08 08:49:50.201247195 -0400 EDT m=+0.082251363 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:50 managed-node1 auth_test_1_kube-auth_test_1_kube[34441]: This container is intended for podman CI testing\nJul 08 08:49:50 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully 
entered the 'dead' state.\nJul 08 08:49:50 managed-node1 podman[34445]: 2025-07-08 08:49:50.232670019 -0400 EDT m=+0.019042210 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:50 managed-node1 podman[34445]: 2025-07-08 08:49:50.245569477 -0400 EDT m=+0.031941586 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:50 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5353.\nJul 08 08:49:50 managed-node1 podman[34445]: 2025-07-08 08:49:50.289153121 -0400 EDT m=+0.075525288 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:50 managed-node1 podman[34445]: 2025-07-08 08:49:50.29144807 -0400 EDT m=+0.077820446 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:50 managed-node1 auth_test_1_kube-auth_test_1_kube[34457]: This container is intended for podman CI testing\nJul 08 08:49:50 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:50 managed-node1 conmon[34457]: conmon 1ac36cb6a3d087c731d7 : 
Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events\nJul 08 08:49:50 managed-node1 podman[34461]: 2025-07-08 08:49:50.321324859 -0400 EDT m=+0.020135867 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:50 managed-node1 podman[34461]: 2025-07-08 08:49:50.33434929 -0400 EDT m=+0.033160267 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:50 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5360.\nJul 08 08:49:50 managed-node1 podman[34461]: 2025-07-08 08:49:50.378924546 -0400 EDT m=+0.077735626 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:50 managed-node1 podman[34461]: 2025-07-08 08:49:50.381208079 -0400 EDT m=+0.080019250 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:50 managed-node1 auth_test_1_kube-auth_test_1_kube[34473]: This container is intended for podman CI testing\nJul 08 08:49:50 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: 
https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:50 managed-node1 conmon[34473]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events\nJul 08 08:49:50 managed-node1 podman[34477]: 2025-07-08 08:49:50.413602604 -0400 EDT m=+0.020596862 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:50 managed-node1 podman[34477]: 2025-07-08 08:49:50.426273388 -0400 EDT m=+0.033267586 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:50 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5367.\nJul 08 08:49:50 managed-node1 podman[34477]: 2025-07-08 08:49:50.482970616 -0400 EDT m=+0.089964814 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:50 managed-node1 podman[34477]: 2025-07-08 08:49:50.486156553 -0400 EDT m=+0.093151125 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:50 managed-node1 auth_test_1_kube-auth_test_1_kube[34488]: This container is intended for podman CI testing\nJul 08 08:49:50 
managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:50 managed-node1 podman[34492]: 2025-07-08 08:49:50.515366939 -0400 EDT m=+0.019844643 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:50 managed-node1 podman[34492]: 2025-07-08 08:49:50.527762341 -0400 EDT m=+0.032240027 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:50 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5374.\nJul 08 08:49:50 managed-node1 podman[34492]: 2025-07-08 08:49:50.574853568 -0400 EDT m=+0.079331256 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:50 managed-node1 podman[34492]: 2025-07-08 08:49:50.577156975 -0400 EDT m=+0.081634701 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:50 managed-node1 auth_test_1_kube-auth_test_1_kube[34503]: This container is intended for podman CI testing\nJul 08 08:49:50 managed-node1 systemd[1]: 
libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:50 managed-node1 podman[34507]: 2025-07-08 08:49:50.610108896 -0400 EDT m=+0.019722511 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:50 managed-node1 podman[34507]: 2025-07-08 08:49:50.622705094 -0400 EDT m=+0.032318590 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:50 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5381.\nJul 08 08:49:50 managed-node1 podman[34507]: 2025-07-08 08:49:50.668869723 -0400 EDT m=+0.078483353 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:50 managed-node1 podman[34507]: 2025-07-08 08:49:50.672170263 -0400 EDT m=+0.081783833 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:50 managed-node1 auth_test_1_kube-auth_test_1_kube[34518]: This container is intended for podman CI testing\nJul 08 08:49:50 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated 
successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:50 managed-node1 podman[34522]: 2025-07-08 08:49:50.707159048 -0400 EDT m=+0.022341059 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:50 managed-node1 podman[34522]: 2025-07-08 08:49:50.719997876 -0400 EDT m=+0.035179885 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:50 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5388.\nJul 08 08:49:50 managed-node1 podman[34522]: 2025-07-08 08:49:50.763582018 -0400 EDT m=+0.078764019 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:50 managed-node1 podman[34522]: 2025-07-08 08:49:50.765810113 -0400 EDT m=+0.080992132 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:50 managed-node1 auth_test_1_kube-auth_test_1_kube[34533]: This container is intended for podman CI testing\nJul 08 08:49:50 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 
Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:50 managed-node1 podman[34537]: 2025-07-08 08:49:50.79394865 -0400 EDT m=+0.018973088 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:50 managed-node1 podman[34537]: 2025-07-08 08:49:50.807084965 -0400 EDT m=+0.032109455 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:50 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5395.\nJul 08 08:49:50 managed-node1 podman[34537]: 2025-07-08 08:49:50.851272783 -0400 EDT m=+0.076297235 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:50 managed-node1 podman[34537]: 2025-07-08 08:49:50.853483802 -0400 EDT m=+0.078508269 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:50 managed-node1 auth_test_1_kube-auth_test_1_kube[34548]: This container is intended for podman CI testing\nJul 08 08:49:50 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit 
libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:50 managed-node1 podman[34552]: 2025-07-08 08:49:50.885571405 -0400 EDT m=+0.019189971 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:50 managed-node1 podman[34552]: 2025-07-08 08:49:50.898112952 -0400 EDT m=+0.031731451 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:50 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5402.\nJul 08 08:49:50 managed-node1 podman[34552]: 2025-07-08 08:49:50.944736716 -0400 EDT m=+0.078355266 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:50 managed-node1 podman[34552]: 2025-07-08 08:49:50.947515836 -0400 EDT m=+0.081134365 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:50 managed-node1 auth_test_1_kube-auth_test_1_kube[34563]: This container is intended for podman CI testing\nJul 08 08:49:50 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully 
entered the 'dead' state.\nJul 08 08:49:50 managed-node1 podman[34567]: 2025-07-08 08:49:50.977984989 -0400 EDT m=+0.020979643 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:50 managed-node1 podman[34567]: 2025-07-08 08:49:50.990807822 -0400 EDT m=+0.033802272 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:51 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5409.\nJul 08 08:49:51 managed-node1 podman[34567]: 2025-07-08 08:49:51.036679245 -0400 EDT m=+0.079673683 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:51 managed-node1 podman[34567]: 2025-07-08 08:49:51.038959012 -0400 EDT m=+0.081953462 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:51 managed-node1 auth_test_1_kube-auth_test_1_kube[34578]: This container is intended for podman CI testing\nJul 08 08:49:51 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:51 managed-node1 podman[34582]: 2025-07-08 
08:49:51.070108649 -0400 EDT m=+0.021434065 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:51 managed-node1 podman[34582]: 2025-07-08 08:49:51.082800889 -0400 EDT m=+0.034126324 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:51 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5416.\nJul 08 08:49:51 managed-node1 podman[34582]: 2025-07-08 08:49:51.134760642 -0400 EDT m=+0.086086140 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:51 managed-node1 podman[34582]: 2025-07-08 08:49:51.137199535 -0400 EDT m=+0.088525117 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:51 managed-node1 auth_test_1_kube-auth_test_1_kube[34593]: This container is intended for podman CI testing\nJul 08 08:49:51 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:51 managed-node1 podman[34597]: 2025-07-08 08:49:51.170718179 -0400 EDT m=+0.020132858 container died 
1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:51 managed-node1 podman[34597]: 2025-07-08 08:49:51.18270693 -0400 EDT m=+0.032121342 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:51 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5423.\nJul 08 08:49:51 managed-node1 podman[34597]: 2025-07-08 08:49:51.229909139 -0400 EDT m=+0.079323558 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:51 managed-node1 podman[34597]: 2025-07-08 08:49:51.232549006 -0400 EDT m=+0.081963536 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:51 managed-node1 auth_test_1_kube-auth_test_1_kube[34608]: This container is intended for podman CI testing\nJul 08 08:49:51 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:51 managed-node1 podman[34612]: 2025-07-08 08:49:51.261318869 -0400 EDT m=+0.019791525 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 
(image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:51 managed-node1 podman[34612]: 2025-07-08 08:49:51.27393467 -0400 EDT m=+0.032407237 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:51 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5430.\nJul 08 08:49:51 managed-node1 podman[34612]: 2025-07-08 08:49:51.322707529 -0400 EDT m=+0.081180097 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:51 managed-node1 podman[34612]: 2025-07-08 08:49:51.324957676 -0400 EDT m=+0.083430360 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:51 managed-node1 auth_test_1_kube-auth_test_1_kube[34623]: This container is intended for podman CI testing\nJul 08 08:49:51 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:51 managed-node1 conmon[34623]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: 
/sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events\nJul 08 08:49:51 managed-node1 podman[34628]: 2025-07-08 08:49:51.357302871 -0400 EDT m=+0.019873605 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:51 managed-node1 podman[34628]: 2025-07-08 08:49:51.369677187 -0400 EDT m=+0.032247848 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:51 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5437.\nJul 08 08:49:51 managed-node1 podman[34628]: 2025-07-08 08:49:51.412764957 -0400 EDT m=+0.075335673 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:51 managed-node1 podman[34628]: 2025-07-08 08:49:51.415079414 -0400 EDT m=+0.077650213 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:51 managed-node1 auth_test_1_kube-auth_test_1_kube[34639]: This container is intended for podman CI testing\nJul 08 08:49:51 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit 
libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:51 managed-node1 conmon[34639]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events\nJul 08 08:49:51 managed-node1 podman[34644]: 2025-07-08 08:49:51.450177559 -0400 EDT m=+0.024940839 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:51 managed-node1 podman[34644]: 2025-07-08 08:49:51.46464424 -0400 EDT m=+0.039407266 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:51 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5444.\nJul 08 08:49:51 managed-node1 podman[34644]: 2025-07-08 08:49:51.522426136 -0400 EDT m=+0.097189367 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:51 managed-node1 auth_test_1_kube-auth_test_1_kube[34700]: This container is intended for podman CI testing\nJul 08 08:49:51 managed-node1 podman[34644]: 2025-07-08 08:49:51.526546755 -0400 EDT m=+0.101309701 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:51 managed-node1 systemd[1]: 
libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:51 managed-node1 podman[34726]: 2025-07-08 08:49:51.567774571 -0400 EDT m=+0.026283228 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:51 managed-node1 podman[34726]: 2025-07-08 08:49:51.582173666 -0400 EDT m=+0.040682285 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:51 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5451.\nJul 08 08:49:51 managed-node1 podman[34726]: 2025-07-08 08:49:51.646536272 -0400 EDT m=+0.105045205 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:51 managed-node1 auth_test_1_kube-auth_test_1_kube[34782]: This container is intended for podman CI testing\nJul 08 08:49:51 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:51 managed-node1 conmon[34782]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: 
/sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events\nJul 08 08:49:51 managed-node1 podman[34726]: 2025-07-08 08:49:51.651445419 -0400 EDT m=+0.109954130 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:51 managed-node1 podman[34804]: 2025-07-08 08:49:51.700497825 -0400 EDT m=+0.032592195 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:51 managed-node1 podman[34804]: 2025-07-08 08:49:51.713819615 -0400 EDT m=+0.045913893 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:51 managed-node1 python3.12[34802]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:49:51 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5458.\nJul 08 08:49:51 managed-node1 podman[34804]: 2025-07-08 08:49:51.824761802 -0400 EDT m=+0.156856211 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:51 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: 
https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:51 managed-node1 auth_test_1_kube-auth_test_1_kube[34815]: This container is intended for podman CI testing\nJul 08 08:49:51 managed-node1 conmon[34815]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events\nJul 08 08:49:51 managed-node1 podman[34804]: 2025-07-08 08:49:51.831023939 -0400 EDT m=+0.163118200 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:51 managed-node1 podman[34821]: 2025-07-08 08:49:51.872133012 -0400 EDT m=+0.031590733 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:51 managed-node1 podman[34821]: 2025-07-08 08:49:51.885646271 -0400 EDT m=+0.045103780 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:51 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5465.\nJul 08 08:49:51 managed-node1 podman[34821]: 2025-07-08 08:49:51.932911436 -0400 EDT m=+0.092369102 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:51 
managed-node1 podman[34821]: 2025-07-08 08:49:51.937824433 -0400 EDT m=+0.097282013 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:51 managed-node1 auth_test_1_kube-auth_test_1_kube[34857]: This container is intended for podman CI testing\nJul 08 08:49:51 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:51 managed-node1 conmon[34857]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events\nJul 08 08:49:51 managed-node1 podman[34861]: 2025-07-08 08:49:51.970024669 -0400 EDT m=+0.020063434 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:51 managed-node1 podman[34861]: 2025-07-08 08:49:51.982530604 -0400 EDT m=+0.032569401 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:52 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5472.\nJul 08 08:49:52 managed-node1 podman[34861]: 2025-07-08 08:49:52.029649096 -0400 EDT m=+0.079687898 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, 
io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:52 managed-node1 podman[34861]: 2025-07-08 08:49:52.031897225 -0400 EDT m=+0.081936106 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:52 managed-node1 auth_test_1_kube-auth_test_1_kube[34873]: This container is intended for podman CI testing\nJul 08 08:49:52 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:52 managed-node1 podman[34877]: 2025-07-08 08:49:52.064301272 -0400 EDT m=+0.019236559 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:52 managed-node1 podman[34877]: 2025-07-08 08:49:52.076969282 -0400 EDT m=+0.031904537 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:52 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5479.\nJul 08 08:49:52 managed-node1 podman[34877]: 2025-07-08 08:49:52.125426809 -0400 EDT m=+0.080362136 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, 
created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:52 managed-node1 podman[34877]: 2025-07-08 08:49:52.1278094 -0400 EDT m=+0.082744802 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:52 managed-node1 auth_test_1_kube-auth_test_1_kube[34888]: This container is intended for podman CI testing\nJul 08 08:49:52 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:52 managed-node1 conmon[34888]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events\nJul 08 08:49:52 managed-node1 podman[34892]: 2025-07-08 08:49:52.157661918 -0400 EDT m=+0.020301566 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:52 managed-node1 podman[34892]: 2025-07-08 08:49:52.170252412 -0400 EDT m=+0.032891993 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:52 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5486.\nJul 08 08:49:52 managed-node1 podman[34892]: 2025-07-08 08:49:52.222263041 -0400 EDT m=+0.084902632 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, 
name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:52 managed-node1 podman[34892]: 2025-07-08 08:49:52.224496917 -0400 EDT m=+0.087136533 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:52 managed-node1 auth_test_1_kube-auth_test_1_kube[34904]: This container is intended for podman CI testing\nJul 08 08:49:52 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:52 managed-node1 podman[34908]: 2025-07-08 08:49:52.252564496 -0400 EDT m=+0.019618489 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:52 managed-node1 podman[34908]: 2025-07-08 08:49:52.265004193 -0400 EDT m=+0.032058104 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:52 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5493.\nJul 08 08:49:52 managed-node1 podman[34908]: 2025-07-08 08:49:52.315701532 -0400 EDT m=+0.082755442 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, 
pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:52 managed-node1 podman[34908]: 2025-07-08 08:49:52.31797177 -0400 EDT m=+0.085025729 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:52 managed-node1 auth_test_1_kube-auth_test_1_kube[34920]: This container is intended for podman CI testing\nJul 08 08:49:52 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:52 managed-node1 podman[34924]: 2025-07-08 08:49:52.349729488 -0400 EDT m=+0.019737581 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:52 managed-node1 podman[34924]: 2025-07-08 08:49:52.362572499 -0400 EDT m=+0.032580554 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:52 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5500.\nJul 08 08:49:52 managed-node1 podman[34924]: 2025-07-08 08:49:52.412063084 -0400 EDT m=+0.082071241 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, 
io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:52 managed-node1 podman[34924]: 2025-07-08 08:49:52.414363606 -0400 EDT m=+0.084371723 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:52 managed-node1 auth_test_1_kube-auth_test_1_kube[34935]: This container is intended for podman CI testing\nJul 08 08:49:52 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:52 managed-node1 podman[34939]: 2025-07-08 08:49:52.448029008 -0400 EDT m=+0.020726318 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:52 managed-node1 podman[34939]: 2025-07-08 08:49:52.460699146 -0400 EDT m=+0.033396467 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:52 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5507.\nJul 08 08:49:52 managed-node1 podman[34939]: 2025-07-08 08:49:52.511935791 -0400 EDT m=+0.084633119 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, 
PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:52 managed-node1 podman[34939]: 2025-07-08 08:49:52.514269499 -0400 EDT m=+0.086966874 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:52 managed-node1 auth_test_1_kube-auth_test_1_kube[34951]: This container is intended for podman CI testing\nJul 08 08:49:52 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:52 managed-node1 podman[34955]: 2025-07-08 08:49:52.547364457 -0400 EDT m=+0.019949573 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:52 managed-node1 podman[34955]: 2025-07-08 08:49:52.559869428 -0400 EDT m=+0.032454533 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:52 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5514.\nJul 08 08:49:52 managed-node1 podman[34955]: 2025-07-08 08:49:52.604272195 -0400 EDT m=+0.076857302 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, 
PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:52 managed-node1 podman[34955]: 2025-07-08 08:49:52.607259927 -0400 EDT m=+0.079845062 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:52 managed-node1 auth_test_1_kube-auth_test_1_kube[34966]: This container is intended for podman CI testing\nJul 08 08:49:52 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:52 managed-node1 conmon[34966]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events\nJul 08 08:49:52 managed-node1 podman[34970]: 2025-07-08 08:49:52.636508232 -0400 EDT m=+0.019579286 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:52 managed-node1 podman[34970]: 2025-07-08 08:49:52.649916826 -0400 EDT m=+0.032987855 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:52 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5521.\nJul 08 08:49:52 managed-node1 podman[34970]: 2025-07-08 08:49:52.694096176 -0400 EDT m=+0.077167300 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, 
name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:52 managed-node1 podman[34970]: 2025-07-08 08:49:52.696375296 -0400 EDT m=+0.079446384 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:52 managed-node1 auth_test_1_kube-auth_test_1_kube[34982]: This container is intended for podman CI testing\nJul 08 08:49:52 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:52 managed-node1 podman[34986]: 2025-07-08 08:49:52.727887217 -0400 EDT m=+0.020636003 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:52 managed-node1 podman[34986]: 2025-07-08 08:49:52.7406964 -0400 EDT m=+0.033445178 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:52 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5528.\nJul 08 08:49:52 managed-node1 podman[34986]: 2025-07-08 08:49:52.785223061 -0400 EDT m=+0.077971841 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, 
pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:52 managed-node1 podman[34986]: 2025-07-08 08:49:52.787501377 -0400 EDT m=+0.080250208 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:52 managed-node1 auth_test_1_kube-auth_test_1_kube[34998]: This container is intended for podman CI testing\nJul 08 08:49:52 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:52 managed-node1 podman[35002]: 2025-07-08 08:49:52.816356887 -0400 EDT m=+0.020080054 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:52 managed-node1 podman[35002]: 2025-07-08 08:49:52.829281424 -0400 EDT m=+0.033004541 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:52 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5535.\nJul 08 08:49:52 managed-node1 podman[35002]: 2025-07-08 08:49:52.881723432 -0400 EDT m=+0.085446556 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, 
created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:52 managed-node1 podman[35002]: 2025-07-08 08:49:52.883942315 -0400 EDT m=+0.087665481 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:52 managed-node1 auth_test_1_kube-auth_test_1_kube[35013]: This container is intended for podman CI testing\nJul 08 08:49:52 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:52 managed-node1 podman[35017]: 2025-07-08 08:49:52.915706329 -0400 EDT m=+0.019774268 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:52 managed-node1 podman[35017]: 2025-07-08 08:49:52.927707601 -0400 EDT m=+0.031775518 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:52 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5542.\nJul 08 08:49:52 managed-node1 podman[35017]: 2025-07-08 08:49:52.970528 -0400 EDT m=+0.074596004 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, 
created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:52 managed-node1 podman[35017]: 2025-07-08 08:49:52.973158654 -0400 EDT m=+0.077226615 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:52 managed-node1 auth_test_1_kube-auth_test_1_kube[35028]: This container is intended for podman CI testing\nJul 08 08:49:52 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:53 managed-node1 podman[35032]: 2025-07-08 08:49:53.001433921 -0400 EDT m=+0.020038412 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:53 managed-node1 podman[35032]: 2025-07-08 08:49:53.013954542 -0400 EDT m=+0.032558988 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:53 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5549.\nJul 08 08:49:53 managed-node1 podman[35032]: 2025-07-08 08:49:53.064806574 -0400 EDT m=+0.083411031 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, 
created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:53 managed-node1 podman[35032]: 2025-07-08 08:49:53.067083007 -0400 EDT m=+0.085687494 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:53 managed-node1 auth_test_1_kube-auth_test_1_kube[35043]: This container is intended for podman CI testing\nJul 08 08:49:53 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:53 managed-node1 podman[35047]: 2025-07-08 08:49:53.097740963 -0400 EDT m=+0.020759320 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:53 managed-node1 podman[35047]: 2025-07-08 08:49:53.11056486 -0400 EDT m=+0.033583200 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:53 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5556.\nJul 08 08:49:53 managed-node1 podman[35047]: 2025-07-08 08:49:53.159763414 -0400 EDT m=+0.082781770 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:53 managed-node1 
podman[35047]: 2025-07-08 08:49:53.162006083 -0400 EDT m=+0.085024488 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:53 managed-node1 auth_test_1_kube-auth_test_1_kube[35058]: This container is intended for podman CI testing\nJul 08 08:49:53 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:53 managed-node1 conmon[35058]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events\nJul 08 08:49:53 managed-node1 podman[35062]: 2025-07-08 08:49:53.193654904 -0400 EDT m=+0.019439840 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:53 managed-node1 podman[35062]: 2025-07-08 08:49:53.206217556 -0400 EDT m=+0.032002451 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:53 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5563.\nJul 08 08:49:53 managed-node1 podman[35062]: 2025-07-08 08:49:53.26219104 -0400 EDT m=+0.087975941 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, 
io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:53 managed-node1 podman[35062]: 2025-07-08 08:49:53.267010852 -0400 EDT m=+0.092795806 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:53 managed-node1 auth_test_1_kube-auth_test_1_kube[35096]: This container is intended for podman CI testing\nJul 08 08:49:53 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:53 managed-node1 podman[35120]: 2025-07-08 08:49:53.307197324 -0400 EDT m=+0.025129594 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:53 managed-node1 podman[35120]: 2025-07-08 08:49:53.321875099 -0400 EDT m=+0.039807296 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:53 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5570.\nJul 08 08:49:53 managed-node1 podman[35120]: 2025-07-08 08:49:53.373402715 -0400 EDT m=+0.091335406 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, 
PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:53 managed-node1 podman[35120]: 2025-07-08 08:49:53.376403871 -0400 EDT m=+0.094335992 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:53 managed-node1 auth_test_1_kube-auth_test_1_kube[35167]: This container is intended for podman CI testing\nJul 08 08:49:53 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:53 managed-node1 podman[35196]: 2025-07-08 08:49:53.423124804 -0400 EDT m=+0.031110290 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:53 managed-node1 podman[35196]: 2025-07-08 08:49:53.439764954 -0400 EDT m=+0.047750144 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:53 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5577.\nJul 08 08:49:53 managed-node1 podman[35196]: 2025-07-08 08:49:53.534882508 -0400 EDT m=+0.142867772 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, 
PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:53 managed-node1 auth_test_1_kube-auth_test_1_kube[35234]: This container is intended for podman CI testing\nJul 08 08:49:53 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:53 managed-node1 podman[35196]: 2025-07-08 08:49:53.54100085 -0400 EDT m=+0.148985897 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:53 managed-node1 conmon[35234]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events\nJul 08 08:49:53 managed-node1 podman[35238]: 2025-07-08 08:49:53.585159651 -0400 EDT m=+0.030526617 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:53 managed-node1 python3.12[35229]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:49:53 managed-node1 podman[35238]: 2025-07-08 08:49:53.600630294 -0400 EDT m=+0.045997182 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:53 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5584.\nJul 08 08:49:53 managed-node1 
podman[35238]: 2025-07-08 08:49:53.654569108 -0400 EDT m=+0.099936073 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:53 managed-node1 podman[35238]: 2025-07-08 08:49:53.658200253 -0400 EDT m=+0.103567101 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:53 managed-node1 auth_test_1_kube-auth_test_1_kube[35251]: This container is intended for podman CI testing\nJul 08 08:49:53 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:53 managed-node1 podman[35274]: 2025-07-08 08:49:53.692985719 -0400 EDT m=+0.022754624 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:53 managed-node1 podman[35274]: 2025-07-08 08:49:53.705345161 -0400 EDT m=+0.035114032 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:53 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5591.\nJul 08 08:49:53 managed-node1 podman[35274]: 2025-07-08 08:49:53.753180763 -0400 EDT m=+0.082949635 container init 
1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:53 managed-node1 podman[35274]: 2025-07-08 08:49:53.755398526 -0400 EDT m=+0.085167433 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:53 managed-node1 auth_test_1_kube-auth_test_1_kube[35292]: This container is intended for podman CI testing\nJul 08 08:49:53 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:53 managed-node1 podman[35296]: 2025-07-08 08:49:53.788253959 -0400 EDT m=+0.020142780 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:53 managed-node1 podman[35296]: 2025-07-08 08:49:53.80170614 -0400 EDT m=+0.033594946 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:53 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5598.\nJul 08 08:49:53 managed-node1 podman[35296]: 2025-07-08 08:49:53.849098303 -0400 EDT m=+0.080987255 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 
(image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:53 managed-node1 podman[35296]: 2025-07-08 08:49:53.851335171 -0400 EDT m=+0.083224015 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:53 managed-node1 auth_test_1_kube-auth_test_1_kube[35308]: This container is intended for podman CI testing\nJul 08 08:49:53 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:53 managed-node1 podman[35312]: 2025-07-08 08:49:53.884555507 -0400 EDT m=+0.019570948 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:53 managed-node1 podman[35312]: 2025-07-08 08:49:53.896749294 -0400 EDT m=+0.031764716 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:53 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5605.\nJul 08 08:49:53 managed-node1 podman[35312]: 2025-07-08 08:49:53.945718218 -0400 EDT m=+0.080733636 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, 
pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:53 managed-node1 podman[35312]: 2025-07-08 08:49:53.949025596 -0400 EDT m=+0.084041052 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:53 managed-node1 auth_test_1_kube-auth_test_1_kube[35323]: This container is intended for podman CI testing\nJul 08 08:49:53 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:53 managed-node1 podman[35327]: 2025-07-08 08:49:53.97821992 -0400 EDT m=+0.020059326 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:53 managed-node1 podman[35327]: 2025-07-08 08:49:53.99071089 -0400 EDT m=+0.032550283 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:54 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5612.\nJul 08 08:49:54 managed-node1 podman[35327]: 2025-07-08 08:49:54.031033777 -0400 EDT m=+0.072873181 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, 
created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:54 managed-node1 podman[35327]: 2025-07-08 08:49:54.033292818 -0400 EDT m=+0.075132290 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:54 managed-node1 auth_test_1_kube-auth_test_1_kube[35339]: This container is intended for podman CI testing\nJul 08 08:49:54 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:54 managed-node1 podman[35343]: 2025-07-08 08:49:54.062431341 -0400 EDT m=+0.020319855 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:54 managed-node1 podman[35343]: 2025-07-08 08:49:54.074918588 -0400 EDT m=+0.032807096 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:54 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5619.\nJul 08 08:49:54 managed-node1 podman[35343]: 2025-07-08 08:49:54.127795758 -0400 EDT m=+0.085684274 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, 
PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:54 managed-node1 auth_test_1_kube-auth_test_1_kube[35355]: This container is intended for podman CI testing\nJul 08 08:49:54 managed-node1 podman[35343]: 2025-07-08 08:49:54.130515559 -0400 EDT m=+0.088404096 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:54 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:54 managed-node1 podman[35359]: 2025-07-08 08:49:54.163020175 -0400 EDT m=+0.019485815 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:54 managed-node1 podman[35359]: 2025-07-08 08:49:54.175914914 -0400 EDT m=+0.032380598 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:54 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5626.\nJul 08 08:49:54 managed-node1 podman[35359]: 2025-07-08 08:49:54.235781563 -0400 EDT m=+0.092247283 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, 
PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:54 managed-node1 podman[35359]: 2025-07-08 08:49:54.240035881 -0400 EDT m=+0.096501608 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:54 managed-node1 auth_test_1_kube-auth_test_1_kube[35370]: This container is intended for podman CI testing\nJul 08 08:49:54 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:54 managed-node1 conmon[35370]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events\nJul 08 08:49:54 managed-node1 podman[35391]: 2025-07-08 08:49:54.285368099 -0400 EDT m=+0.028779315 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:54 managed-node1 podman[35391]: 2025-07-08 08:49:54.299881247 -0400 EDT m=+0.043292241 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:54 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5633.\nJul 08 08:49:54 managed-node1 podman[35391]: 2025-07-08 08:49:54.354916875 -0400 EDT m=+0.098327991 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 
(image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:54 managed-node1 podman[35391]: 2025-07-08 08:49:54.357432697 -0400 EDT m=+0.100843625 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:54 managed-node1 auth_test_1_kube-auth_test_1_kube[35448]: This container is intended for podman CI testing\nJul 08 08:49:54 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:54 managed-node1 podman[35468]: 2025-07-08 08:49:54.396590414 -0400 EDT m=+0.030313114 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:54 managed-node1 podman[35468]: 2025-07-08 08:49:54.409494778 -0400 EDT m=+0.043216848 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:54 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5640.\nJul 08 08:49:54 managed-node1 podman[35468]: 2025-07-08 08:49:54.469857567 -0400 EDT m=+0.103579579 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, 
pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:54 managed-node1 auth_test_1_kube-auth_test_1_kube[35528]: This container is intended for podman CI testing\nJul 08 08:49:54 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:54 managed-node1 podman[35468]: 2025-07-08 08:49:54.474841998 -0400 EDT m=+0.108564065 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:54 managed-node1 conmon[35528]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events\nJul 08 08:49:54 managed-node1 podman[35535]: 2025-07-08 08:49:54.525306513 -0400 EDT m=+0.035156287 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:54 managed-node1 podman[35535]: 2025-07-08 08:49:54.539009865 -0400 EDT m=+0.048859754 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:54 managed-node1 python3.12[35533]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:49:54 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope 
has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5647.\nJul 08 08:49:54 managed-node1 podman[35535]: 2025-07-08 08:49:54.639294004 -0400 EDT m=+0.149143874 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:54 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:54 managed-node1 auth_test_1_kube-auth_test_1_kube[35547]: This container is intended for podman CI testing\nJul 08 08:49:54 managed-node1 conmon[35547]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events\nJul 08 08:49:54 managed-node1 podman[35535]: 2025-07-08 08:49:54.645180192 -0400 EDT m=+0.155029937 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:54 managed-node1 podman[35552]: 2025-07-08 08:49:54.688064641 -0400 EDT m=+0.031133612 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:54 managed-node1 podman[35552]: 2025-07-08 08:49:54.702336193 -0400 EDT m=+0.045404917 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:54 managed-node1 systemd[1]: 
Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5654.\nJul 08 08:49:54 managed-node1 podman[35552]: 2025-07-08 08:49:54.753639417 -0400 EDT m=+0.096708160 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:54 managed-node1 podman[35552]: 2025-07-08 08:49:54.755959748 -0400 EDT m=+0.099028531 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:54 managed-node1 auth_test_1_kube-auth_test_1_kube[35588]: This container is intended for podman CI testing\nJul 08 08:49:54 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:54 managed-node1 podman[35592]: 2025-07-08 08:49:54.788654551 -0400 EDT m=+0.019611415 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:54 managed-node1 podman[35592]: 2025-07-08 08:49:54.801611928 -0400 EDT m=+0.032568690 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:54 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun 
container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5661.\nJul 08 08:49:54 managed-node1 podman[35592]: 2025-07-08 08:49:54.849621261 -0400 EDT m=+0.080578035 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:54 managed-node1 podman[35592]: 2025-07-08 08:49:54.851914638 -0400 EDT m=+0.082871431 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:54 managed-node1 auth_test_1_kube-auth_test_1_kube[35603]: This container is intended for podman CI testing\nJul 08 08:49:54 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:54 managed-node1 podman[35607]: 2025-07-08 08:49:54.881117621 -0400 EDT m=+0.020200537 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:54 managed-node1 podman[35607]: 2025-07-08 08:49:54.893647143 -0400 EDT m=+0.032730046 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 08 08:49:54 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit 
libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5668.\nJul 08 08:49:54 managed-node1 podman[35607]: 2025-07-08 08:49:54.94675282 -0400 EDT m=+0.085835930 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:54 managed-node1 podman[35607]: 2025-07-08 08:49:54.950642796 -0400 EDT m=+0.089725652 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:54 managed-node1 auth_test_1_kube-auth_test_1_kube[35619]: This container is intended for podman CI testing\nJul 08 08:49:54 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:54 managed-node1 conmon[35619]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events\nJul 08 08:49:54 managed-node1 podman[35647]: 2025-07-08 08:49:54.993222726 -0400 EDT m=+0.028202975 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:55 managed-node1 podman[35647]: 2025-07-08 08:49:55.008067372 -0400 EDT m=+0.043047680 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, 
created_by=test/system/build-testimage)\nJul 08 08:49:55 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5675.\nJul 08 08:49:55 managed-node1 podman[35647]: 2025-07-08 08:49:55.0606935 -0400 EDT m=+0.095673937 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:55 managed-node1 podman[35647]: 2025-07-08 08:49:55.063449151 -0400 EDT m=+0.098429455 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:55 managed-node1 auth_test_1_kube-auth_test_1_kube[35694]: This container is intended for podman CI testing\nJul 08 08:49:55 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:55 managed-node1 podman[35716]: 2025-07-08 08:49:55.112920274 -0400 EDT m=+0.036372698 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:55 managed-node1 podman[35716]: 2025-07-08 08:49:55.126570749 -0400 EDT m=+0.050023173 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:55 managed-node1 systemd[1]: Started 
libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5682.\nJul 08 08:49:55 managed-node1 podman[35716]: 2025-07-08 08:49:55.186796426 -0400 EDT m=+0.110248987 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:55 managed-node1 auth_test_1_kube-auth_test_1_kube[35779]: This container is intended for podman CI testing\nJul 08 08:49:55 managed-node1 podman[35716]: 2025-07-08 08:49:55.19058706 -0400 EDT m=+0.114039398 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:55 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:55 managed-node1 podman[35783]: 2025-07-08 08:49:55.237215238 -0400 EDT m=+0.031105664 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:55 managed-node1 podman[35783]: 2025-07-08 08:49:55.25293569 -0400 EDT m=+0.046825984 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:55 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun 
container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5689.\nJul 08 08:49:55 managed-node1 podman[35783]: 2025-07-08 08:49:55.342917667 -0400 EDT m=+0.136808092 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:55 managed-node1 auth_test_1_kube-auth_test_1_kube[35794]: This container is intended for podman CI testing\nJul 08 08:49:55 managed-node1 podman[35783]: 2025-07-08 08:49:55.349068439 -0400 EDT m=+0.142958773 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:55 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:55 managed-node1 conmon[35794]: conmon 1ac36cb6a3d087c731d7 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice/libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope/container/memory.events\nJul 08 08:49:55 managed-node1 podman[35799]: 2025-07-08 08:49:55.397578783 -0400 EDT m=+0.029895789 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:55 managed-node1 podman[35799]: 2025-07-08 08:49:55.411485494 -0400 EDT m=+0.043802478 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.buildah.version=1.21.0, io.containers.autoupdate=registry, 
PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 08 08:49:55 managed-node1 python3.12[35778]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None\nJul 08 08:49:55 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5696.\nJul 08 08:49:55 managed-node1 podman[35799]: 2025-07-08 08:49:55.524711388 -0400 EDT m=+0.157028332 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:55 managed-node1 podman[35799]: 2025-07-08 08:49:55.530282716 -0400 EDT m=+0.162599610 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:55 managed-node1 auth_test_1_kube-auth_test_1_kube[35811]: This container is intended for podman CI testing\nJul 08 08:49:55 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:55 managed-node1 systemd[1]: Reload requested from client PID 35818 ('systemctl') (unit session-5.scope)...\nJul 08 08:49:55 managed-node1 systemd[1]: Reloading...\nJul 08 08:49:55 managed-node1 podman[35817]: 2025-07-08 08:49:55.593084177 -0400 EDT m=+0.047755188 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:55 managed-node1 podman[35817]: 
2025-07-08 08:49:55.609674353 -0400 EDT m=+0.064345340 container restart 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:55 managed-node1 systemd-rc-local-generator[35872]: /etc/rc.d/rc.local is not marked executable, skipping.\nJul 08 08:49:55 managed-node1 systemd[1]: Reloading finished in 229 ms.\nJul 08 08:49:55 managed-node1 systemd[1]: Stopping podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service - A template for running K8s workloads via podman-kube-play...\n\u2591\u2591 Subject: A stop job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5703.\nJul 08 08:49:55 managed-node1 systemd[1]: Started libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5704.\nJul 08 08:49:55 managed-node1 podman[35817]: 2025-07-08 08:49:55.870407173 -0400 EDT m=+0.325078107 container init 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 08 08:49:55 managed-node1 podman[35817]: 2025-07-08 08:49:55.873119723 -0400 EDT m=+0.327790771 container start 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:55 managed-node1 auth_test_1_kube-auth_test_1_kube[35885]: This container is intended for podman CI testing\nJul 08 08:49:55 managed-node1 systemd[1]: libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: 
https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07.scope has successfully entered the 'dead' state.\nJul 08 08:49:55 managed-node1 podman[35886]: 2025-07-08 08:49:55.890939078 -0400 EDT m=+0.054229148 pod stop c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe (image=, name=auth_test_1_kube)\nJul 08 08:49:55 managed-node1 podman[35886]: 2025-07-08 08:49:55.895856762 -0400 EDT m=+0.059146999 container died 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:55 managed-node1 systemd[1]: var-lib-containers-storage-overlay-a3f2d24d69694007b4135875ca1c98dc5a11a388333a5edad571fa1e5b8ff0f8-merged.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay-a3f2d24d69694007b4135875ca1c98dc5a11a388333a5edad571fa1e5b8ff0f8-merged.mount has successfully entered the 'dead' state.\nJul 08 08:49:55 managed-node1 podman[35886]: 2025-07-08 08:49:55.93896244 -0400 EDT m=+0.102252402 container cleanup 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 08 08:49:55 managed-node1 systemd[1]: libpod-48ec4542d282038db01fc6c7ad1ebb663235fcef10f4d93e5b03692ced2a2c70.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-48ec4542d282038db01fc6c7ad1ebb663235fcef10f4d93e5b03692ced2a2c70.scope has successfully entered the 'dead' state.\nJul 08 08:49:55 managed-node1 podman[35886]: 2025-07-08 08:49:55.958546219 -0400 EDT m=+0.121836545 container died 48ec4542d282038db01fc6c7ad1ebb663235fcef10f4d93e5b03692ced2a2c70 (image=, name=c71b45337dde-infra, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:55 managed-node1 systemd[1]: run-p20064-i20364.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit run-p20064-i20364.scope has successfully entered the 'dead' state.\nJul 08 08:49:55 managed-node1 kernel: podman1: port 1(veth1) entered disabled state\nJul 08 08:49:55 managed-node1 kernel: veth1 (unregistering): left allmulticast mode\nJul 08 08:49:55 managed-node1 kernel: veth1 (unregistering): left promiscuous mode\nJul 08 08:49:55 managed-node1 kernel: podman1: port 1(veth1) entered disabled state\nJul 08 08:49:56 managed-node1 NetworkManager[721]: [1751978996.0048] device (podman1): state change: activated -> 
unmanaged (reason 'unmanaged', managed-type: 'removed')\nJul 08 08:49:56 managed-node1 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service...\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5711.\nJul 08 08:49:56 managed-node1 systemd[1]: run-netns-netns\\x2d23bde762\\x2d8f16\\x2d7128\\x2db248\\x2d149bdcabb8bf.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit run-netns-netns\\x2d23bde762\\x2d8f16\\x2d7128\\x2db248\\x2d149bdcabb8bf.mount has successfully entered the 'dead' state.\nJul 08 08:49:56 managed-node1 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service.\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5711.\nJul 08 08:49:56 managed-node1 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-48ec4542d282038db01fc6c7ad1ebb663235fcef10f4d93e5b03692ced2a2c70-rootfs-merge.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-48ec4542d282038db01fc6c7ad1ebb663235fcef10f4d93e5b03692ced2a2c70-rootfs-merge.mount has successfully entered the 'dead' state.\nJul 08 08:49:56 managed-node1 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-48ec4542d282038db01fc6c7ad1ebb663235fcef10f4d93e5b03692ced2a2c70-userdata-shm.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-48ec4542d282038db01fc6c7ad1ebb663235fcef10f4d93e5b03692ced2a2c70-userdata-shm.mount has successfully entered the 'dead' state.\nJul 08 08:49:56 managed-node1 podman[35886]: 2025-07-08 08:49:56.064750431 -0400 EDT m=+0.228040415 container cleanup 48ec4542d282038db01fc6c7ad1ebb663235fcef10f4d93e5b03692ced2a2c70 (image=, name=c71b45337dde-infra, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:56 managed-node1 systemd[1]: Removed slice machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice - cgroup machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice.\n\u2591\u2591 Subject: A stop job for unit machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit 
machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5790 and the job result is done.\nJul 08 08:49:56 managed-node1 systemd[1]: machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice: Consumed 1.811s CPU time, 1M memory peak.\n\u2591\u2591 Subject: Resources consumed by unit runtime\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice completed and consumed the indicated resources.\nJul 08 08:49:56 managed-node1 podman[35886]: 2025-07-08 08:49:56.071432875 -0400 EDT m=+0.234722869 pod stop c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe (image=, name=auth_test_1_kube)\nJul 08 08:49:56 managed-node1 systemd[1]: machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice: Failed to open /run/systemd/transient/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice: No such file or directory\nJul 08 08:49:56 managed-node1 podman[35886]: 2025-07-08 08:49:56.098237058 -0400 EDT m=+0.261527049 container remove 1ac36cb6a3d087c731d756a6144abe002b5dff3153e6ed07ea9d835b01978b07 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 08 08:49:56 managed-node1 podman[35886]: 2025-07-08 08:49:56.125616889 -0400 EDT m=+0.288906885 container remove 48ec4542d282038db01fc6c7ad1ebb663235fcef10f4d93e5b03692ced2a2c70 (image=, name=c71b45337dde-infra, pod_id=c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:56 managed-node1 systemd[1]: machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice: Failed to open /run/systemd/transient/machine-libpod_pod_c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe.slice: No such file or directory\nJul 08 08:49:56 managed-node1 podman[35886]: 2025-07-08 08:49:56.134735427 -0400 EDT m=+0.298025390 pod remove c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe (image=, name=auth_test_1_kube)\nJul 08 08:49:56 managed-node1 podman[35886]: 2025-07-08 08:49:56.138033275 -0400 EDT m=+0.301323520 container kill b4c59e52584f9100b63cd3e9f3678cfdc7322306f3c4eda8222b7e2f06624b58 (image=, name=3ef6fcac6278-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:56 managed-node1 systemd[1]: libpod-b4c59e52584f9100b63cd3e9f3678cfdc7322306f3c4eda8222b7e2f06624b58.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b4c59e52584f9100b63cd3e9f3678cfdc7322306f3c4eda8222b7e2f06624b58.scope has successfully entered the 'dead' state.\nJul 08 08:49:56 managed-node1 conmon[20044]: conmon b4c59e52584f9100b63c : Failed to open cgroups file: 
/sys/fs/cgroup/machine.slice/libpod-b4c59e52584f9100b63cd3e9f3678cfdc7322306f3c4eda8222b7e2f06624b58.scope/container/memory.events\nJul 08 08:49:56 managed-node1 podman[35886]: 2025-07-08 08:49:56.145688213 -0400 EDT m=+0.308978342 container died b4c59e52584f9100b63cd3e9f3678cfdc7322306f3c4eda8222b7e2f06624b58 (image=, name=3ef6fcac6278-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:56 managed-node1 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-b4c59e52584f9100b63cd3e9f3678cfdc7322306f3c4eda8222b7e2f06624b58-rootfs-merge.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-b4c59e52584f9100b63cd3e9f3678cfdc7322306f3c4eda8222b7e2f06624b58-rootfs-merge.mount has successfully entered the 'dead' state.\nJul 08 08:49:56 managed-node1 podman[35886]: 2025-07-08 08:49:56.204852934 -0400 EDT m=+0.368142936 container remove b4c59e52584f9100b63cd3e9f3678cfdc7322306f3c4eda8222b7e2f06624b58 (image=, name=3ef6fcac6278-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 08 08:49:56 managed-node1 podman[35886]: Pods stopped:\nJul 08 08:49:56 managed-node1 podman[35886]: c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe\nJul 08 08:49:56 managed-node1 podman[35886]: Pods removed:\nJul 08 08:49:56 managed-node1 podman[35886]: c71b45337dde4a286f42fe8868f7b8a7e16633b76690724b474303c61f86c0fe\nJul 08 08:49:56 managed-node1 podman[35886]: Secrets removed:\nJul 08 08:49:56 managed-node1 podman[35886]: Volumes removed:\nJul 08 08:49:56 managed-node1 systemd[1]: podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service has successfully entered the 'dead' state.\nJul 08 08:49:56 managed-node1 systemd[1]: Stopped podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service - A template for running K8s workloads via podman-kube-play.\n\u2591\u2591 Subject: A stop job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5703 and the job result is done.\nJul 08 08:49:56 managed-node1 systemd[1]: podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service: Consumed 33.643s CPU time, 37.2M memory peak.\n\u2591\u2591 Subject: Resources consumed by unit runtime\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service completed and consumed the indicated resources.\nJul 08 08:49:56 managed-node1 python3.12[36105]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml follow=False get_checksum=True 
get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:49:56 managed-node1 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-b4c59e52584f9100b63cd3e9f3678cfdc7322306f3c4eda8222b7e2f06624b58-userdata-shm.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-b4c59e52584f9100b63cd3e9f3678cfdc7322306f3c4eda8222b7e2f06624b58-userdata-shm.mount has successfully entered the 'dead' state.\nJul 08 08:49:57 managed-node1 python3.12[36262]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 08 08:49:57 managed-node1 python3.12[36262]: ansible-containers.podman.podman_play version: 5.5.1, kube file /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml\nJul 08 08:49:57 managed-node1 python3.12[36430]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:49:58 managed-node1 python3.12[36585]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:50:00 managed-node1 python3.12[36742]: ansible-systemd Invoked with name=auth_test_1_quadlet.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None\nJul 08 08:50:00 managed-node1 systemd[1]: Reload requested from client PID 36745 ('systemctl') (unit session-5.scope)...\nJul 08 08:50:00 managed-node1 systemd[1]: Reloading...\nJul 08 08:50:00 managed-node1 systemd-rc-local-generator[36793]: /etc/rc.d/rc.local is not marked executable, skipping.\nJul 08 08:50:00 managed-node1 systemd[1]: Reloading finished in 203 ms.\nJul 08 08:50:01 managed-node1 python3.12[36957]: ansible-stat Invoked with path=/etc/containers/systemd/auth_test_1_quadlet.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:50:01 managed-node1 python3.12[37270]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:50:04 managed-node1 python3.12[37582]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:50:05 managed-node1 python3.12[37743]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True 
get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:50:06 managed-node1 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.\nJul 08 08:50:07 managed-node1 python3.12[37901]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:50:09 managed-node1 python3.12[38058]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:50:10 managed-node1 python3.12[38215]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:50:11 managed-node1 python3.12[38372]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:50:12 managed-node1 python3.12[38528]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None\nJul 08 08:50:12 managed-node1 python3.12[38685]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:50:13 managed-node1 python3.12[38840]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:50:14 managed-node1 python3.12[38995]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:50:15 managed-node1 python3.12[39152]: ansible-systemd Invoked with name=auth_test_1_quadlet.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None\nJul 08 08:50:15 managed-node1 systemd[1]: Reload requested from client PID 39155 ('systemctl') (unit session-5.scope)...\nJul 08 08:50:15 managed-node1 systemd[1]: Reloading...\nJul 08 08:50:15 managed-node1 systemd-rc-local-generator[39190]: /etc/rc.d/rc.local is not marked executable, skipping.\nJul 08 08:50:15 managed-node1 systemd[1]: Reloading finished in 216 ms.\nJul 08 08:50:16 managed-node1 python3.12[39364]: ansible-stat Invoked with path=/etc/containers/systemd/auth_test_1_quadlet.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:50:17 managed-node1 python3.12[39676]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True 
argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:50:19 managed-node1 python3.12[39987]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:50:20 managed-node1 python3.12[40148]: ansible-getent Invoked with database=passwd key=auth_test_user1 fail_key=False service=None split=None\nJul 08 08:50:20 managed-node1 python3.12[40304]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:50:21 managed-node1 python3.12[40460]: ansible-user Invoked with name=auth_test_user1 state=absent non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node1 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None\nJul 08 08:50:21 managed-node1 python3.12[40616]: ansible-file Invoked with path=/home/auth_test_user1 state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:50:22 managed-node1 python3.12[40771]: ansible-ansible.legacy.command Invoked with _raw_params=podman inspect podman_registry --format '{{range .}}{{range .Mounts}}{{if eq .Type \"volume\"}}{{.Name}}{{end}}{{end}}{{end}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:50:22 managed-node1 python3.12[40935]: ansible-ansible.legacy.command Invoked with _raw_params=podman rm -f podman_registry _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:50:22 managed-node1 systemd[1]: libpod-f2c95e55ef894afd89ddd278d5b3c1eaf43e3665402627d4291a7a7f163e81bc.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-f2c95e55ef894afd89ddd278d5b3c1eaf43e3665402627d4291a7a7f163e81bc.scope has successfully entered the 'dead' state.\nJul 08 08:50:22 managed-node1 podman[40936]: 2025-07-08 08:50:22.67414362 -0400 EDT m=+0.041656966 container died f2c95e55ef894afd89ddd278d5b3c1eaf43e3665402627d4291a7a7f163e81bc (image=quay.io/libpod/registry:2.8.2, name=podman_registry)\nJul 08 08:50:22 managed-node1 kernel: podman0: port 1(veth0) entered disabled state\nJul 08 08:50:22 managed-node1 kernel: veth0 (unregistering): left allmulticast mode\nJul 08 08:50:22 managed-node1 kernel: veth0 (unregistering): 
left promiscuous mode\nJul 08 08:50:22 managed-node1 kernel: podman0: port 1(veth0) entered disabled state\nJul 08 08:50:22 managed-node1 NetworkManager[721]: [1751979022.7196] device (podman0): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed')\nJul 08 08:50:22 managed-node1 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service...\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5791.\nJul 08 08:50:22 managed-node1 systemd[1]: run-netns-netns\\x2d470568dd\\x2dcb1f\\x2d0545\\x2d4ce3\\x2d303f19bb0757.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit run-netns-netns\\x2d470568dd\\x2dcb1f\\x2d0545\\x2d4ce3\\x2d303f19bb0757.mount has successfully entered the 'dead' state.\nJul 08 08:50:22 managed-node1 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service.\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5791.\nJul 08 08:50:22 managed-node1 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-f2c95e55ef894afd89ddd278d5b3c1eaf43e3665402627d4291a7a7f163e81bc-userdata-shm.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-f2c95e55ef894afd89ddd278d5b3c1eaf43e3665402627d4291a7a7f163e81bc-userdata-shm.mount has successfully entered the 'dead' state.\nJul 08 08:50:22 managed-node1 systemd[1]: var-lib-containers-storage-overlay-1edf2a61e7717e54fe2166194ec642baa4ae0ab463595f28a4f079d84846a39e-merged.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay-1edf2a61e7717e54fe2166194ec642baa4ae0ab463595f28a4f079d84846a39e-merged.mount has successfully entered the 'dead' state.\nJul 08 08:50:22 managed-node1 podman[40936]: 2025-07-08 08:50:22.814620775 -0400 EDT m=+0.182133970 container remove f2c95e55ef894afd89ddd278d5b3c1eaf43e3665402627d4291a7a7f163e81bc (image=quay.io/libpod/registry:2.8.2, name=podman_registry)\nJul 08 08:50:22 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 08 08:50:22 managed-node1 systemd[1]: libpod-conmon-f2c95e55ef894afd89ddd278d5b3c1eaf43e3665402627d4291a7a7f163e81bc.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit 
succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-conmon-f2c95e55ef894afd89ddd278d5b3c1eaf43e3665402627d4291a7a7f163e81bc.scope has successfully entered the 'dead' state.\nJul 08 08:50:23 managed-node1 python3.12[41128]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume rm adb0a365ad5e043225f1b238691d0f909d8df3bc7ab8077f6ef439c4ae00bfe7 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:50:23 managed-node1 podman[41129]: 2025-07-08 08:50:23.305299712 -0400 EDT m=+0.024461642 volume remove adb0a365ad5e043225f1b238691d0f909d8df3bc7ab8077f6ef439c4ae00bfe7\nJul 08 08:50:23 managed-node1 python3.12[41290]: ansible-file Invoked with path=/tmp/lsr_g_pgovnt_podman state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:50:25 managed-node1 python3.12[41496]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d\nJul 08 08:50:26 managed-node1 python3.12[41680]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:50:27 managed-node1 python3.12[41835]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:50:29 managed-node1 python3.12[42145]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:50:30 managed-node1 python3.12[42306]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 08 08:50:30 managed-node1 python3.12[42462]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:50:32 managed-node1 python3.12[42619]: ansible-tempfile Invoked with state=directory prefix=lsr_podman_config_ suffix= path=None\nJul 08 08:50:32 managed-node1 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.\nJul 08 08:50:32 managed-node1 python3.12[42774]: ansible-ansible.legacy.command Invoked with _raw_params=tar --ignore-failed-read -c -P -v -p -f /tmp/lsr_podman_config_xpiczilj/backup.tar /etc/containers/containers.conf.d/50-systemroles.conf /etc/containers/registries.conf.d/50-systemroles.conf /etc/containers/storage.conf /etc/containers/policy.json _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:50:33 managed-node1 python3.12[42931]: ansible-user Invoked with name=user1 state=present non_unique=False force=False 
remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node1 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None\nJul 08 08:50:33 managed-node1 useradd[42933]: new group: name=user1, GID=1000\nJul 08 08:50:33 managed-node1 useradd[42933]: new user: name=user1, UID=1000, GID=1000, home=/home/user1, shell=/bin/bash, from=/dev/pts/0\nJul 08 08:50:35 managed-node1 python3.12[43243]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:50:36 managed-node1 python3.12[43404]: ansible-getent Invoked with database=passwd key=user1 fail_key=False service=None split=None\nJul 08 08:50:36 managed-node1 python3.12[43560]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:50:36 managed-node1 python3.12[43717]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:50:37 managed-node1 python3.12[43873]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:50:38 managed-node1 python3.12[44029]: ansible-file Invoked with path=/home/user1/.config/containers/containers.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:50:38 managed-node1 python3.12[44184]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 08 08:50:38 managed-node1 python3.12[44309]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf owner=user1 group=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751979038.242837-13178-157508935633128/.source.conf _original_basename=.3t631c0n follow=False checksum=b1776092f2908d76e11fd6af87267469b2c17d5a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:50:39 managed-node1 python3.12[44464]: ansible-file Invoked with path=/home/user1/.config/containers/registries.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True 
modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:50:39 managed-node1 python3.12[44619]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 08 08:50:40 managed-node1 python3.12[44744]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf owner=user1 group=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751979039.5151405-13207-260225766438174/.source.conf _original_basename=.8hb8eqwc follow=False checksum=fde25488ce7040f1639af7bfc88ed125318cc0b0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:50:40 managed-node1 python3.12[44899]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:50:40 managed-node1 python3.12[45054]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 08 08:50:41 managed-node1 python3.12[45179]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/storage.conf owner=user1 group=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751979040.6993687-13236-206291270506142/.source.conf _original_basename=.24a9rkzz follow=False checksum=38f015f4780579bd388dd955b42916199fd7fe19 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:50:41 managed-node1 python3.12[45334]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:50:42 managed-node1 python3.12[45489]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:50:42 managed-node1 python3.12[45644]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 08 08:50:42 managed-node1 python3.12[45769]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/policy.json owner=user1 group=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751979042.2485042-13279-227893683407134/.source.json _original_basename=._jtk04da 
follow=False checksum=6746c079ad563b735fc39f73d4876654b80b0a0d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:50:43 managed-node1 python3.12[45924]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:50:43 managed-node1 python3.12[46081]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:50:44 managed-node1 python3.12[46237]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:50:45 managed-node1 python3.12[46393]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:50:46 managed-node1 python3.12[46828]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:50:47 managed-node1 python3.12[46985]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:50:47 managed-node1 python3.12[47141]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:50:48 managed-node1 python3.12[47300]: ansible-stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:50:49 managed-node1 python3.12[47457]: ansible-stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:50:49 managed-node1 python3.12[47614]: ansible-stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:50:49 managed-node1 python3.12[47771]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:50:50 managed-node1 python3.12[47928]: ansible-ansible.legacy.command Invoked with _raw_params=grep 'container_name_as_hostname[ ]*=[ ]*true' /home/user1/.config/containers/containers.conf.d/50-systemroles.conf _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:50:52 
managed-node1 python3.12[48239]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:50:53 managed-node1 python3.12[48401]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:50:53 managed-node1 python3.12[48558]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:50:54 managed-node1 python3.12[48714]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:50:54 managed-node1 python3.12[48870]: ansible-file Invoked with path=/home/user1/.config/containers/containers.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:50:55 managed-node1 python3.12[49025]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 08 08:50:55 managed-node1 python3.12[49103]: ansible-ansible.legacy.file Invoked with owner=user1 group=user1 mode=0644 dest=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf _original_basename=.0gnlhomz recurse=False state=file path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:50:56 managed-node1 python3.12[49258]: ansible-file Invoked with path=/home/user1/.config/containers/registries.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:50:56 managed-node1 python3.12[49413]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 08 08:50:56 managed-node1 python3.12[49491]: ansible-ansible.legacy.file Invoked with owner=user1 group=user1 mode=0644 dest=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf _original_basename=.accmlokk recurse=False state=file path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None 
modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:50:57 managed-node1 python3.12[49646]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:50:57 managed-node1 python3.12[49801]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 08 08:50:57 managed-node1 python3.12[49879]: ansible-ansible.legacy.file Invoked with owner=user1 group=user1 mode=0644 dest=/home/user1/.config/containers/storage.conf _original_basename=.pvcmfj7i recurse=False state=file path=/home/user1/.config/containers/storage.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:50:58 managed-node1 python3.12[50034]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:50:58 managed-node1 python3.12[50189]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:50:59 managed-node1 python3.12[50346]: ansible-slurp Invoked with path=/home/user1/.config/containers/policy.json src=/home/user1/.config/containers/policy.json\nJul 08 08:51:00 managed-node1 python3.12[50501]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:51:00 managed-node1 python3.12[50658]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:51:00 managed-node1 python3.12[50814]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:51:01 managed-node1 python3.12[50970]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:51:03 managed-node1 python3.12[51358]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 
08:51:03 managed-node1 python3.12[51515]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:51:03 managed-node1 python3.12[51671]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:51:04 managed-node1 python3.12[51827]: ansible-stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:51:05 managed-node1 python3.12[51984]: ansible-stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:51:05 managed-node1 python3.12[52141]: ansible-stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:51:05 managed-node1 python3.12[52298]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:51:08 managed-node1 python3.12[52610]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:51:08 managed-node1 python3.12[52771]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 08 08:51:09 managed-node1 python3.12[52927]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:51:10 managed-node1 python3.12[53084]: ansible-file Invoked with path=/etc/containers/containers.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:51:10 managed-node1 python3.12[53239]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 08 08:51:11 managed-node1 python3.12[53364]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/containers.conf.d/50-systemroles.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751979070.540365-14172-121943904920472/.source.conf _original_basename=.q56vcedp follow=False checksum=b1776092f2908d76e11fd6af87267469b2c17d5a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:51:11 managed-node1 python3.12[53519]: ansible-file Invoked with path=/etc/containers/registries.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True 
modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:51:12 managed-node1 python3.12[53674]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 08 08:51:12 managed-node1 python3.12[53799]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/registries.conf.d/50-systemroles.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751979071.810814-14217-46013201585375/.source.conf _original_basename=.p9it418x follow=False checksum=fde25488ce7040f1639af7bfc88ed125318cc0b0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:51:12 managed-node1 python3.12[53954]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:51:13 managed-node1 python3.12[54109]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 08 08:51:13 managed-node1 python3.12[54234]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/storage.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751979073.1248894-14268-85562215010734/.source.conf _original_basename=.scvdoudg follow=False checksum=38f015f4780579bd388dd955b42916199fd7fe19 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:51:14 managed-node1 python3.12[54389]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:51:14 managed-node1 python3.12[54544]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:51:15 managed-node1 python3.12[54701]: ansible-slurp Invoked with path=/etc/containers/policy.json src=/etc/containers/policy.json\nJul 08 08:51:15 managed-node1 python3.12[54856]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 08 08:51:15 managed-node1 python3.12[54983]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/policy.json owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751979075.3179543-14355-115139051150641/.source.json _original_basename=.bjdfcfs5 follow=False 
checksum=6746c079ad563b735fc39f73d4876654b80b0a0d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:51:16 managed-node1 python3.12[55138]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:51:17 managed-node1 python3.12[55295]: ansible-file Invoked with path=/root/.config/containers state=directory owner=root group=0 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:51:19 managed-node1 python3.12[55732]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:51:20 managed-node1 python3.12[55889]: ansible-stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:51:20 managed-node1 python3.12[56046]: ansible-stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:51:21 managed-node1 python3.12[56203]: ansible-stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:51:21 managed-node1 python3.12[56360]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:51:23 managed-node1 python3.12[56674]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:51:24 managed-node1 python3.12[56836]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:51:25 managed-node1 python3.12[56993]: ansible-file Invoked with path=/etc/containers/containers.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:51:26 managed-node1 python3.12[57148]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 08 08:51:26 managed-node1 python3.12[57226]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/containers.conf.d/50-systemroles.conf _original_basename=.qxvg1nyv recurse=False state=file path=/etc/containers/containers.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None 
access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:51:26 managed-node1 python3.12[57381]: ansible-file Invoked with path=/etc/containers/registries.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:51:27 managed-node1 python3.12[57536]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 08 08:51:27 managed-node1 python3.12[57614]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/registries.conf.d/50-systemroles.conf _original_basename=.rx7m24ak recurse=False state=file path=/etc/containers/registries.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:51:28 managed-node1 python3.12[57769]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:51:28 managed-node1 python3.12[57924]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 08 08:51:29 managed-node1 python3.12[58002]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/storage.conf _original_basename=.bdv1ih5b recurse=False state=file path=/etc/containers/storage.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:51:29 managed-node1 python3.12[58157]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:51:29 managed-node1 python3.12[58312]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:51:30 managed-node1 python3.12[58469]: ansible-slurp Invoked with path=/etc/containers/policy.json src=/etc/containers/policy.json\nJul 08 08:51:31 managed-node1 python3.12[58624]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:51:32 managed-node1 python3.12[58781]: ansible-file Invoked with path=/root/.config/containers 
state=directory owner=root group=0 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:51:34 managed-node1 python3.12[59169]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:51:35 managed-node1 python3.12[59326]: ansible-stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:51:35 managed-node1 python3.12[59483]: ansible-stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:51:35 managed-node1 python3.12[59640]: ansible-stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:51:36 managed-node1 python3.12[59797]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:51:36 managed-node1 python3.12[59954]: ansible-slurp Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf src=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf\nJul 08 08:51:37 managed-node1 python3.12[60109]: ansible-slurp Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf src=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf\nJul 08 08:51:37 managed-node1 python3.12[60264]: ansible-slurp Invoked with path=/home/user1/.config/containers/storage.conf src=/home/user1/.config/containers/storage.conf\nJul 08 08:51:38 managed-node1 python3.12[60419]: ansible-slurp Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf src=/etc/containers/containers.conf.d/50-systemroles.conf\nJul 08 08:51:38 managed-node1 python3.12[60574]: ansible-slurp Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf src=/etc/containers/registries.conf.d/50-systemroles.conf\nJul 08 08:51:39 managed-node1 python3.12[60729]: ansible-slurp Invoked with path=/etc/containers/storage.conf src=/etc/containers/storage.conf\nJul 08 08:51:41 managed-node1 python3.12[61039]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:51:42 managed-node1 python3.12[61200]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:51:43 managed-node1 python3.12[61357]: ansible-file Invoked with path=/etc/containers/containers.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:51:43 managed-node1 python3.12[61512]: ansible-ansible.legacy.stat Invoked with 
path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 08 08:51:44 managed-node1 python3.12[61639]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/containers.conf.d/50-systemroles.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751979103.6532533-15389-229593234271028/.source.conf _original_basename=.nwfb8ndj follow=False checksum=9694c1d1c700a6435eecf4066b052584f4ee94c0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:51:44 managed-node1 python3.12[61794]: ansible-file Invoked with path=/etc/containers/registries.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:51:45 managed-node1 python3.12[61949]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 08 08:51:45 managed-node1 python3.12[62027]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/registries.conf.d/50-systemroles.conf _original_basename=.fsfm1l81 recurse=False state=file path=/etc/containers/registries.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:51:46 managed-node1 python3.12[62182]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:51:46 managed-node1 python3.12[62337]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 08 08:51:46 managed-node1 python3.12[62415]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/storage.conf _original_basename=.rj1uoysr recurse=False state=file path=/etc/containers/storage.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:51:47 managed-node1 python3.12[62570]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None 
attributes=None\nJul 08 08:51:47 managed-node1 python3.12[62725]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:51:48 managed-node1 python3.12[62882]: ansible-slurp Invoked with path=/etc/containers/policy.json src=/etc/containers/policy.json\nJul 08 08:51:48 managed-node1 python3.12[63037]: ansible-slurp Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf src=/etc/containers/containers.conf.d/50-systemroles.conf\nJul 08 08:51:49 managed-node1 python3.12[63192]: ansible-file Invoked with state=absent path=/etc/containers/containers.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:51:49 managed-node1 python3.12[63347]: ansible-file Invoked with state=absent path=/etc/containers/registries.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:51:50 managed-node1 python3.12[63502]: ansible-file Invoked with state=absent path=/etc/containers/storage.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:51:50 managed-node1 python3.12[63657]: ansible-file Invoked with state=absent path=/etc/containers/policy.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:51:50 managed-node1 python3.12[63812]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:51:51 managed-node1 python3.12[63967]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:51:51 managed-node1 python3.12[64122]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/storage.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S 
access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:51:51 managed-node1 python3.12[64277]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/policy.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:51:52 managed-node1 python3.12[64432]: ansible-file Invoked with state=absent path=/root/.config/containers/auth.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:51:52 managed-node1 python3.12[64587]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/auth.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:51:53 managed-node1 python3.12[64742]: ansible-ansible.legacy.command Invoked with _raw_params=tar xfvpP /tmp/lsr_podman_config_xpiczilj/backup.tar _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:51:53 managed-node1 python3.12[64898]: ansible-file Invoked with state=absent path=/tmp/lsr_podman_config_xpiczilj recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:51:55 managed-node1 python3.12[65104]: ansible-setup Invoked with gather_subset=['!all', '!min', 'distribution', 'distribution_major_version', 'distribution_version', 'os_family'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d\nJul 08 08:51:56 managed-node1 python3.12[65261]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:51:56 managed-node1 systemd[4462]: Created slice background.slice - User Background Tasks Slice.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 14.\nJul 08 08:51:56 managed-node1 systemd[4462]: Starting systemd-tmpfiles-clean.service - Cleanup of User's Temporary Files and Directories...\n\u2591\u2591 Subject: A start job for unit UNIT has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit 
UNIT has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 13.\nJul 08 08:51:56 managed-node1 systemd[4462]: Finished systemd-tmpfiles-clean.service - Cleanup of User's Temporary Files and Directories.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 13.\nJul 08 08:51:56 managed-node1 python3.12[65418]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:51:58 managed-node1 python3.12[65728]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:52:00 managed-node1 python3.12[65889]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 08 08:52:00 managed-node1 python3.12[66045]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:52:03 managed-node1 python3.12[66253]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d\nJul 08 08:52:06 managed-node1 python3.12[66437]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:52:06 managed-node1 python3.12[66592]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:52:08 managed-node1 python3.12[66902]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:52:09 managed-node1 python3.12[67063]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 08 08:52:10 managed-node1 python3.12[67219]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:52:13 managed-node1 python3.12[67427]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d\nJul 08 08:52:14 managed-node1 python3.12[67611]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:52:15 managed-node1 python3.12[67766]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:52:17 managed-node1 python3.12[68076]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:52:18 managed-node1 python3.12[68237]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 08 08:52:18 managed-node1 python3.12[68393]: 
ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:52:20 managed-node1 python3.12[68550]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:52:22 managed-node1 python3.12[68707]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:52:22 managed-node1 python3.12[68862]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/nopull.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 08 08:52:23 managed-node1 python3.12[68987]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751979142.423505-17133-38145694146989/.source.container dest=/etc/containers/systemd/nopull.container owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=670d64fc68a9768edb20cad26df2acc703542d85 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:52:25 managed-node1 python3.12[69297]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:52:26 managed-node1 python3.12[69458]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:52:27 managed-node1 python3.12[69615]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:52:29 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 08 08:52:29 managed-node1 podman[69781]: 2025-07-08 08:52:29.239882519 -0400 EDT m=+0.022057902 image pull-error this_is_a_bogus_image:latest short-name resolution enforced but cannot prompt without a TTY\nJul 08 08:52:29 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 08 08:52:29 managed-node1 python3.12[69942]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None 
selevel=None setype=None attributes=None\nJul 08 08:52:30 managed-node1 python3.12[70097]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/bogus.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 08 08:52:30 managed-node1 python3.12[70222]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751979149.8133175-17320-179092701161425/.source.container dest=/etc/containers/systemd/bogus.container owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=1d087e679d135214e8ac9ccaf33b2222916efb7f backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 08 08:52:33 managed-node1 python3.12[70532]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:52:34 managed-node1 python3.12[70693]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:52:36 managed-node1 python3.12[70851]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:52:37 managed-node1 python3.12[71008]: ansible-systemd Invoked with name=nopull.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None\nJul 08 08:52:37 managed-node1 python3.12[71164]: ansible-stat Invoked with path=/etc/containers/systemd/nopull.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 08 08:52:38 managed-node1 python3.12[71476]: ansible-ansible.legacy.command Invoked with _raw_params=set -x\n set -o pipefail\n exec 1>&2\n #podman volume rm --all\n #podman network prune -f\n podman volume ls\n podman network ls\n podman secret ls\n podman container ls\n podman pod ls\n podman images\n systemctl list-units | grep quadlet\n _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:52:38 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 08 08:52:38 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 08 08:52:39 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 08 08:52:40 
managed-node1 python3.12[71831]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 08 08:52:41 managed-node1 python3.12[71994]: ansible-getent Invoked with database=passwd key=user_quadlet_basic fail_key=False service=None split=None\nJul 08 08:52:42 managed-node1 python3.12[72150]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None",
        "task_name": "Dump journal",
        "task_path": "/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:336"
    }
]
SYSTEM ROLES ERRORS END v1

TASKS RECAP ********************************************************************
Tuesday 08 July 2025  08:52:42 -0400 (0:00:00.444)       0:00:28.980 **********
===============================================================================
fedora.linux_system_roles.podman : Gather the package facts ------------- 1.20s
/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
Gathering Facts --------------------------------------------------------- 1.15s
/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:9
fedora.linux_system_roles.podman : Gather the package facts ------------- 1.04s
/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.podman : Gather the package facts ------------- 0.96s
/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.podman : Gather the package facts ------------- 0.95s
/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.podman : Ensure quadlet file is present ------- 0.80s
/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:75
fedora.linux_system_roles.podman : Stop and disable service ------------- 0.72s
/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
fedora.linux_system_roles.podman : Ensure quadlet file is present ------- 0.72s
/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:75
fedora.linux_system_roles.podman : Get user information ----------------- 0.59s
/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
fedora.linux_system_roles.podman : Ensure container images are present --- 0.59s
/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Debug3 ------------------------------------------------------------------ 0.54s
/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:270
fedora.linux_system_roles.podman : Get podman version ------------------- 0.49s
/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46
fedora.linux_system_roles.podman : Ensure the quadlet directory is present --- 0.48s
/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
fedora.linux_system_roles.podman : Slurp quadlet file ------------------- 0.45s
/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6
Dump journal ------------------------------------------------------------ 0.44s
/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:336
fedora.linux_system_roles.podman : Check if system is ostree ------------ 0.44s
/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11
fedora.linux_system_roles.podman : See if getsubids exists -------------- 0.43s
/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
fedora.linux_system_roles.podman : Get podman version ------------------- 0.43s
/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46
fedora.linux_system_roles.podman : See if getsubids exists -------------- 0.41s
/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
fedora.linux_system_roles.podman : Get podman version ------------------- 0.40s
/tmp/collections-ctS/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46
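Editor's note on the journal dump above: the podman entry at 08:52:29 ("image pull-error this_is_a_bogus_image:latest short-name resolution enforced but cannot prompt without a TTY") is podman's expected behavior when an unqualified image name is pulled in a non-interactive session, so the failure itself is part of what this test run provokes. The debug listing invoked at 08:52:38 can be read back out of the logged _raw_params as an ordinary Ansible shell task. The sketch below is a reconstruction from that journal entry only, not the verbatim task from tests_quadlet_basic.yml; the task name "Debug3" is an assumption taken from the recap entry at line 270 of that file.

    - name: Debug3  # name assumed from the recap; the journal entry does not record it
      ansible.builtin.shell: |
        set -x
        set -o pipefail
        exec 1>&2
        #podman volume rm --all
        #podman network prune -f
        podman volume ls
        podman network ls
        podman secret ls
        podman container ls
        podman pod ls
        podman images
        systemctl list-units | grep quadlet
      changed_when: false  # assumption: the dump is read-only, so it should not report a change

Because "exec 1>&2" redirects stdout to stderr, the listings end up in stderr/journal rather than in registered stdout, and with "set -o pipefail" the final "grep quadlet" would make the task fail when no quadlet units are loaded, so a real task would likely relax that with failed_when or drop pipefail.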