ansible-playbook [core 2.17.7] config file = None configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules'] ansible python module location = /usr/local/lib/python3.12/site-packages/ansible ansible collection location = /tmp/collections-nXs executable location = /usr/local/bin/ansible-playbook python version = 3.12.8 (main, Dec 3 2024, 00:00:00) [GCC 14.2.1 20241104 (Red Hat 14.2.1-6)] (/usr/bin/python3.12) jinja version = 3.1.4 libyaml = True No config file found; using defaults running playbook inside collection fedora.linux_system_roles redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug redirecting (type: callback) ansible.builtin.profile_tasks to ansible.posix.profile_tasks Skipping callback 'default', as we already have a stdout callback. Skipping callback 'minimal', as we already have a stdout callback. Skipping callback 'oneline', as we already have a stdout callback. PLAYBOOK: tests_quadlet_demo.yml *********************************************** 2 plays in /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml PLAY [all] ********************************************************************* TASK [Include vault variables] ************************************************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:5 Saturday 04 January 2025 11:33:03 -0500 (0:00:00.012) 0:00:00.012 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_test_password": { "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n35383939616163653333633431363463313831383037386236646138333162396161356130303461\n3932623930643263313563336163316337643562333936360a363538636631313039343233383732\n38666530383538656639363465313230343533386130303833336434303438333161656262346562\n3362626538613031640a663330613638366132356534363534353239616666653466353961323533\n6565\n" }, "mysql_container_root_password": { "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n61333932373230333539663035366431326163363166363036323963623131363530326231303634\n6635326161643165363366323062333334363730376631660a393566366139353861656364656661\n38653463363837336639363032646433666361646535366137303464623261313663643336306465\n6264663730656337310a343962353137386238383064646533366433333437303566656433386233\n34343235326665646661623131643335313236313131353661386338343366316261643634653633\n3832313034366536616531323963333234326461353130303532\n" } }, "ansible_included_var_files": [ "/tmp/podman-HHE/tests/vars/vault-variables.yml" ], "changed": false } PLAY [Deploy the quadlet demo app] ********************************************* TASK [Gathering Facts] ********************************************************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:9 Saturday 04 January 2025 11:33:03 -0500 (0:00:00.055) 0:00:00.067 ****** [WARNING]: Platform linux on host managed-node2 is using the discovered Python interpreter at /usr/bin/python3.12, but future installation of another Python interpreter could change the meaning of that path. See https://docs.ansible.com/ansible- core/2.17/reference_appendices/interpreter_discovery.html for more information. 
ok: [managed-node2] TASK [Test is only supported on x86_64] **************************************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:38 Saturday 04 January 2025 11:33:05 -0500 (0:00:01.374) 0:00:01.442 ****** skipping: [managed-node2] => { "false_condition": "ansible_facts[\"architecture\"] != \"x86_64\"" } TASK [End test] **************************************************************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:45 Saturday 04 January 2025 11:33:05 -0500 (0:00:00.025) 0:00:01.467 ****** META: end_play conditional evaluated to False, continuing play skipping: [managed-node2] => { "skip_reason": "end_play conditional evaluated to False, continuing play" } MSG: end_play TASK [Generate certificates] *************************************************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:51 Saturday 04 January 2025 11:33:05 -0500 (0:00:00.016) 0:00:01.484 ****** included: fedora.linux_system_roles.certificate for managed-node2 TASK [fedora.linux_system_roles.certificate : Set version specific variables] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:2 Saturday 04 January 2025 11:33:05 -0500 (0:00:00.077) 0:00:01.561 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.certificate : Ensure ansible_facts used by role] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:2 Saturday 04 January 2025 11:33:05 -0500 (0:00:00.033) 0:00:01.594 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__certificate_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.certificate : Check if system is ostree] ******* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:10 Saturday 04 January 2025 11:33:05 -0500 (0:00:00.036) 0:00:01.631 ****** ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.certificate : Set flag to indicate system is ostree] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:15 Saturday 04 January 2025 11:33:05 -0500 (0:00:00.429) 0:00:02.061 ****** ok: [managed-node2] => { "ansible_facts": { "__certificate_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.certificate : Set platform/version specific variables] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:19 Saturday 04 January 2025 11:33:05 -0500 (0:00:00.023) 0:00:02.085 ****** skipping: [managed-node2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { 
"__certificate_certmonger_packages": [ "certmonger", "python3-packaging" ] }, "ansible_included_var_files": [ "/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/certificate/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__certificate_certmonger_packages": [ "certmonger", "python3-packaging" ] }, "ansible_included_var_files": [ "/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/certificate/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.certificate : Ensure certificate role dependencies are installed] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:5 Saturday 04 January 2025 11:33:05 -0500 (0:00:00.039) 0:00:02.124 ****** changed: [managed-node2] => { "changed": true, "rc": 0, "results": [ "Installed: python3-cffi-1.16.0-7.el10.x86_64", "Installed: python3-pyasn1-0.6.1-1.el10.noarch", "Installed: python3-cryptography-43.0.0-4.el10.x86_64", "Installed: python3-ply-3.11-25.el10.noarch", "Installed: python3-pycparser-2.20-16.el10.noarch" ] } lsrpackages: python3-cryptography python3-dbus python3-pyasn1 TASK [fedora.linux_system_roles.certificate : Ensure provider packages are installed] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:23 Saturday 04 January 2025 11:33:08 -0500 (0:00:02.234) 0:00:04.358 ****** changed: [managed-node2] => (item=certmonger) => { "__certificate_provider": "certmonger", "ansible_loop_var": "__certificate_provider", "changed": true, "rc": 0, "results": [ "Installed: nss-sysinit-3.101.0-13.el10.x86_64", "Installed: nss-util-3.101.0-13.el10.x86_64", "Installed: certmonger-0.79.20-3.el10.x86_64", "Installed: python3-packaging-23.2-6.el10.noarch", "Installed: dbus-tools-1:1.14.10-5.el10.x86_64", "Installed: nspr-4.35.0-34.el10.x86_64", "Installed: nss-3.101.0-13.el10.x86_64", "Installed: nss-softokn-3.101.0-13.el10.x86_64", "Installed: nss-softokn-freebl-3.101.0-13.el10.x86_64" ] } lsrpackages: certmonger python3-packaging TASK [fedora.linux_system_roles.certificate : Ensure pre-scripts hooks directory exists] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:35 Saturday 04 January 2025 11:33:10 -0500 (0:00:02.847) 0:00:07.206 ****** changed: [managed-node2] => (item=certmonger) => { "__certificate_provider": "certmonger", "ansible_loop_var": "__certificate_provider", "changed": true, "gid": 0, "group": "root", "mode": "0700", "owner": "root", "path": "/etc/certmonger//pre-scripts", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.certificate : Ensure post-scripts hooks directory exists] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:61 Saturday 04 January 2025 11:33:11 -0500 (0:00:00.774) 0:00:07.980 ****** changed: [managed-node2] => (item=certmonger) => { "__certificate_provider": "certmonger", "ansible_loop_var": "__certificate_provider", "changed": true, "gid": 0, "group": "root", "mode": "0700", "owner": "root", "path": "/etc/certmonger//post-scripts", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.certificate : Ensure 
provider service is running] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:90 Saturday 04 January 2025 11:33:12 -0500 (0:00:00.399) 0:00:08.379 ****** changed: [managed-node2] => (item=certmonger) => { "__certificate_provider": "certmonger", "ansible_loop_var": "__certificate_provider", "changed": true, "enabled": true, "name": "certmonger", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:certmonger_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "system.slice dbus-broker.service basic.target systemd-journald.socket dbus.socket network.target syslog.target sysinit.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedorahosted.certmonger", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "Certificate monitoring and PKI enrollment", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698245632", "EffectiveMemoryMax": "3698245632", "EffectiveTasksMax": "22349", "EnvironmentFiles": "/etc/sysconfig/certmonger (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/sbin/certmonger ; argv[]=/usr/sbin/certmonger -S -p /run/certmonger.pid -n $OPTS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/certmonger ; argv[]=/usr/sbin/certmonger -S -p /run/certmonger.pid -n $OPTS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": 
"none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/certmonger.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "certmonger.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3087900672", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "certmonger.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/certmonger.pid", "PartOf": "dbus-broker.service", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", 
"ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target system.slice dbus.socket", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "UtmpMode": "init", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.certificate : Ensure certificate requests] ***** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:101 Saturday 04 January 2025 11:33:13 -0500 (0:00:01.311) 0:00:09.691 ****** changed: [managed-node2] => (item={'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}) => { "ansible_loop_var": "item", "changed": true, "item": { "ca": "self-sign", "dns": [ "localhost" ], "name": "quadlet_demo" } } MSG: Certificate requested (new). 
TASK [fedora.linux_system_roles.certificate : Slurp the contents of the files] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:152 Saturday 04 January 2025 11:33:14 -0500 (0:00:00.944) 0:00:10.636 ****** ok: [managed-node2] => (item=['cert', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => { "ansible_loop_var": "item", "changed": false, "content": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURnakNDQW1xZ0F3SUJBZ0lRQ2NPWDgwZ0ZUNXFHT1ZMNk12dUwwREFOQmdrcWhraUc5dzBCQVFzRkFEQlEKTVNBd0hnWURWUVFEREJkTWIyTmhiQ0JUYVdkdWFXNW5JRUYxZEdodmNtbDBlVEVzTUNvR0ExVUVBd3dqTURsagpNemszWmpNdE5EZ3dOVFJtT1dFdE9EWXpPVFV5Wm1FdE16Sm1ZamhpWTJZd0hoY05NalV3TVRBME1UWXpNekUwCldoY05Nall3TVRBME1UWXpNekV6V2pBVU1SSXdFQVlEVlFRREV3bHNiMk5oYkdodmMzUXdnZ0VpTUEwR0NTcUcKU0liM0RRRUJBUVVBQTRJQkR3QXdnZ0VLQW9JQkFRRFBXbkJSOTBFK0c3QkJOY08rOHo3RWNseGU4Q3cyWnpYbwp1R1hzOFVGdDZBdmF6Z2pPRkVRa2N5Zk95VTV2NVJ0Y25qYVN2ek13THBVVmV5aGtkWWZiRTkvRERkazgvT0RLCjVsaS9wS2VRUnBOMFo0QkEyRm9lY0o1V3dvYzNoMVcrYy9EaThXRU5odGloaWVlbXJNUEgvY0Zod3A4bHViU0UKejJHbTBGTTh2QWpVd3hmRjZkdkdEUzBsS1cveEFmTFBiMHczNUptUHQycnIwS3pUdUljUU9iL2dtL2E3UGhLMwo5dFY1VVlHTXFycnNTQzFOVVg0YzBlQkliUFdPeGRQMVN5VE5HTy9kdmtxaUhjNHVqUTk1R01wUGVvd0JCMFRkCjVCU0U2T05KYjBwRkt4QWowa1ZRQ3VGY0R0a3FzSTFvNHdHVkVBN0RBb00vREViTityZlRBZ01CQUFHamdaTXcKZ1pBd0N3WURWUjBQQkFRREFnV2dNQlFHQTFVZEVRUU5NQXVDQ1d4dlkyRnNhRzl6ZERBZEJnTlZIU1VFRmpBVQpCZ2dyQmdFRkJRY0RBUVlJS3dZQkJRVUhBd0l3REFZRFZSMFRBUUgvQkFJd0FEQWRCZ05WSFE0RUZnUVUxc2ExCkZ0NjNrYjN4aDJ5SXdJVVNnYUU0TEk0d0h3WURWUjBqQkJnd0ZvQVVqa1lESUtGTVl5RVNVRWJLYWFQMi9NM3YKYWdVd0RRWUpLb1pJaHZjTkFRRUxCUUFEZ2dFQkFIeXJoMVJHUC9Qb3E5OGlpZjBBQ0pVRGV3MUZnZlIzV0VjLwp0N2xoS29ha3cwYWExVEhVRlVPckRoZWV1QTVuSGM3L2FPUndscHVyYmMxWHVrekN1bkxGSnVkdGJUYVc5dlQ1CmJtNVJBL0NVbHkrZngrY2FpZkpjVDhqaWorZTVYem5Jb2V2ck4ySDdDRkRKSXM3MXBmelk1MDhMYnJkRk5neWcKdFJmeExTYklncUhPT2lBYnJrWXJySjQyaGRObU5Xandkd2VHWGNtb3hCMjgzenZ5anZIYnU2TnZENlhneUNiRApCdWtmakdycEJlVk5VN0hlZFNvSExTMDFRa01jcHZpVjVKVExWcWVpMXZYOHNwL3pCMWlyRWxkaEN6cjQzRzJTCitybmpRc25XMU9RWThQYVRnSi9xQzBXalZQMmFVRzgybUFqV3FQcVNiRXpsNEdwWHdJTT0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=", "encoding": "base64", "item": [ "cert", { "ca": "self-sign", "dns": [ "localhost" ], "name": "quadlet_demo" } ], "source": "/etc/pki/tls/certs/quadlet_demo.crt" } ok: [managed-node2] => (item=['key', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => { "ansible_loop_var": "item", "changed": false, "content": 
"LS0tLS1CRUdJTiBQUklWQVRFIEtFWS0tLS0tCk1JSUV2QUlCQURBTkJna3Foa2lHOXcwQkFRRUZBQVNDQktZd2dnU2lBZ0VBQW9JQkFRRFBXbkJSOTBFK0c3QkIKTmNPKzh6N0VjbHhlOEN3Mlp6WG91R1hzOFVGdDZBdmF6Z2pPRkVRa2N5Zk95VTV2NVJ0Y25qYVN2ek13THBVVgpleWhrZFlmYkU5L0REZGs4L09ESzVsaS9wS2VRUnBOMFo0QkEyRm9lY0o1V3dvYzNoMVcrYy9EaThXRU5odGloCmllZW1yTVBIL2NGaHdwOGx1YlNFejJHbTBGTTh2QWpVd3hmRjZkdkdEUzBsS1cveEFmTFBiMHczNUptUHQycnIKMEt6VHVJY1FPYi9nbS9hN1BoSzM5dFY1VVlHTXFycnNTQzFOVVg0YzBlQkliUFdPeGRQMVN5VE5HTy9kdmtxaQpIYzR1alE5NUdNcFBlb3dCQjBUZDVCU0U2T05KYjBwRkt4QWowa1ZRQ3VGY0R0a3FzSTFvNHdHVkVBN0RBb00vCkRFYk4rcmZUQWdNQkFBRUNnZ0VBTXR6d2FHckthajNJZ25aNXljRkpTRTcxTWFTZ2M4ZmxoV3lXTkFVS0U3ZTAKRUx3eGxMdUw2NVVEMHNtMm4rUUhUalRSRmpZb2pFMGc3T0FGREg5NEQ5bFVMamliYlVacXJkckxNbGRsY3hLYwo0UlR5ZFBXZFJaSkNoTzI2MVhneEpFSzBlRGJWbzhmTUkvUDhDdnFlRzlTM053L1NraUoza1l4OVc3b0djUU1OCm9rdXFPU29teStsV3B4ZUEydjI3WlByTVdXOWx4MXB0dTcyai90L0x2eloyV05LVEJUUkdnN3o0eWxESGtqVmcKMXZGZnhyTFAyVTdCQVExZDFGU3EyV2ZVSTVqaERaa052bWRaUXlmR2Vnc2U3WGhzckNxT281NE1pbkMvbmpCbQpxME9NT1VNcG9IK0RrODlyUzBaS3FhL2NvakZuQytnTzUwNlBVNkFvb1FLQmdRRDMwS2dWdHVBU1ZFVTkvcEM2CjdrNEhQUlV3cHF0bUZvRkZLZG82M1Z6NWpNSVhsdVQvREZqVk8ya1htSHIrM044RitCa3FJajNEajFHVE5YZ2cKUFF0dUphV0krNDBOTXBBWVpOeFBZUC8yWEdNUUlnMm1qVkxvUkFBRUY2eitpMFZWNFozYlBxV3ZnMWpTMnpaNwpUZ3JxSCthQ1EwMXU5NWYzRG5xMWJ0L0lkUUtCZ1FEV002cXpnYkFKcnF4d2s3MVZmUVkrTlRFQWJhOHVaY2ZPClpOalVmbnQ4dHBUT1U2TUlZQnpxaldERFRjeWJleXFobGE5aUU0Z1FPVkMrOTlqalRqK3FjYWFLSHFyMW5HN0wKd2owYkEwdVhWTjJXSFlMbXg3dlZWanUxbGh0d1VpSDZrcWNESW5YN2QxS2kvaWR0N3ppdk1tV3dSUjJneXZ3TgpMK2lKWCtDMkp3S0JnQTZReVJCZHExWG9kaGxxdVA4RHVycGFNTUNuU3pPNDV1ZzF5WW5rTU83S0F5NTN4QVhwCnA5OEFtdkMwR2FhWlBOS1Evb21uY1pIby9wWjBjaGNTLzNUOTZYamZzQzZ5WmgzSTc5b0tqWURoZFVFaGYwUG0KMHBTc1NvUHpFRXYxeTZGOG5WYWhuQU53Nk91Ynp0djRmbE84aEVQcnlNVlk4Q21wUkRVZHl6RnhBb0dBRk9VYgpYTUZrNDd6blUyQUdFeVhNQ3h0ek9VQjUyMFBRZHVDOWloVUFoTXptdlMzSmRKS3puMm5GSzZnV01vQ1VTd2puCi8rQTU0S05HcmlwTlBQY3dITGRUY0p2SFA0aFFBbzVqR1pSN3RqcHdGeW12MEVWaG5GRDZLQitKYzY1L1pBc1UKUWFrRjYwN2JmOHpkR2N0Qk0wM3VQNEN1Z0RYczJuaDI3WmV0dXZjQ2dZQm1JYmNQTnlpV3dhdWNoOTIrb0djRAoyT2NObUJ4RE1ybmNsQWpscEVaYWhXYzhUVUlVSEowelk4QWhSNWZnNGpVbUNtQ25GRVUyNXh4Zm1pK1BvdkZJClZmK1VWbDNCaDFPMmV6em5GVDVrNHd6enNGcmtwL0ZoZWppekFkVnVhTGRvanM1RXNaRzNPNVdDVmlTcDRaeEwKakhZS2wzVWIyRGZJTGRpdk1pMWZXQT09Ci0tLS0tRU5EIFBSSVZBVEUgS0VZLS0tLS0K", "encoding": "base64", "item": [ "key", { "ca": "self-sign", "dns": [ "localhost" ], "name": "quadlet_demo" } ], "source": "/etc/pki/tls/private/quadlet_demo.key" } ok: [managed-node2] => (item=['ca', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => { "ansible_loop_var": "item", "changed": false, "content": 
"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURnakNDQW1xZ0F3SUJBZ0lRQ2NPWDgwZ0ZUNXFHT1ZMNk12dUwwREFOQmdrcWhraUc5dzBCQVFzRkFEQlEKTVNBd0hnWURWUVFEREJkTWIyTmhiQ0JUYVdkdWFXNW5JRUYxZEdodmNtbDBlVEVzTUNvR0ExVUVBd3dqTURsagpNemszWmpNdE5EZ3dOVFJtT1dFdE9EWXpPVFV5Wm1FdE16Sm1ZamhpWTJZd0hoY05NalV3TVRBME1UWXpNekUwCldoY05Nall3TVRBME1UWXpNekV6V2pBVU1SSXdFQVlEVlFRREV3bHNiMk5oYkdodmMzUXdnZ0VpTUEwR0NTcUcKU0liM0RRRUJBUVVBQTRJQkR3QXdnZ0VLQW9JQkFRRFBXbkJSOTBFK0c3QkJOY08rOHo3RWNseGU4Q3cyWnpYbwp1R1hzOFVGdDZBdmF6Z2pPRkVRa2N5Zk95VTV2NVJ0Y25qYVN2ek13THBVVmV5aGtkWWZiRTkvRERkazgvT0RLCjVsaS9wS2VRUnBOMFo0QkEyRm9lY0o1V3dvYzNoMVcrYy9EaThXRU5odGloaWVlbXJNUEgvY0Zod3A4bHViU0UKejJHbTBGTTh2QWpVd3hmRjZkdkdEUzBsS1cveEFmTFBiMHczNUptUHQycnIwS3pUdUljUU9iL2dtL2E3UGhLMwo5dFY1VVlHTXFycnNTQzFOVVg0YzBlQkliUFdPeGRQMVN5VE5HTy9kdmtxaUhjNHVqUTk1R01wUGVvd0JCMFRkCjVCU0U2T05KYjBwRkt4QWowa1ZRQ3VGY0R0a3FzSTFvNHdHVkVBN0RBb00vREViTityZlRBZ01CQUFHamdaTXcKZ1pBd0N3WURWUjBQQkFRREFnV2dNQlFHQTFVZEVRUU5NQXVDQ1d4dlkyRnNhRzl6ZERBZEJnTlZIU1VFRmpBVQpCZ2dyQmdFRkJRY0RBUVlJS3dZQkJRVUhBd0l3REFZRFZSMFRBUUgvQkFJd0FEQWRCZ05WSFE0RUZnUVUxc2ExCkZ0NjNrYjN4aDJ5SXdJVVNnYUU0TEk0d0h3WURWUjBqQkJnd0ZvQVVqa1lESUtGTVl5RVNVRWJLYWFQMi9NM3YKYWdVd0RRWUpLb1pJaHZjTkFRRUxCUUFEZ2dFQkFIeXJoMVJHUC9Qb3E5OGlpZjBBQ0pVRGV3MUZnZlIzV0VjLwp0N2xoS29ha3cwYWExVEhVRlVPckRoZWV1QTVuSGM3L2FPUndscHVyYmMxWHVrekN1bkxGSnVkdGJUYVc5dlQ1CmJtNVJBL0NVbHkrZngrY2FpZkpjVDhqaWorZTVYem5Jb2V2ck4ySDdDRkRKSXM3MXBmelk1MDhMYnJkRk5neWcKdFJmeExTYklncUhPT2lBYnJrWXJySjQyaGRObU5Xandkd2VHWGNtb3hCMjgzenZ5anZIYnU2TnZENlhneUNiRApCdWtmakdycEJlVk5VN0hlZFNvSExTMDFRa01jcHZpVjVKVExWcWVpMXZYOHNwL3pCMWlyRWxkaEN6cjQzRzJTCitybmpRc25XMU9RWThQYVRnSi9xQzBXalZQMmFVRzgybUFqV3FQcVNiRXpsNEdwWHdJTT0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=", "encoding": "base64", "item": [ "ca", { "ca": "self-sign", "dns": [ "localhost" ], "name": "quadlet_demo" } ], "source": "/etc/pki/tls/certs/quadlet_demo.crt" } TASK [fedora.linux_system_roles.certificate : Create return data] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:160 Saturday 04 January 2025 11:33:15 -0500 (0:00:01.201) 0:00:11.838 ****** ok: [managed-node2] => { "ansible_facts": { "certificate_test_certs": { "quadlet_demo": { "ca": "/etc/pki/tls/certs/quadlet_demo.crt", "ca_content": "-----BEGIN 
CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQCcOX80gFT5qGOVL6MvuL0DANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMDlj\nMzk3ZjMtNDgwNTRmOWEtODYzOTUyZmEtMzJmYjhiY2YwHhcNMjUwMTA0MTYzMzE0\nWhcNMjYwMTA0MTYzMzEzWjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPWnBR90E+G7BBNcO+8z7Eclxe8Cw2ZzXo\nuGXs8UFt6AvazgjOFEQkcyfOyU5v5RtcnjaSvzMwLpUVeyhkdYfbE9/DDdk8/ODK\n5li/pKeQRpN0Z4BA2FoecJ5Wwoc3h1W+c/Di8WENhtihieemrMPH/cFhwp8lubSE\nz2Gm0FM8vAjUwxfF6dvGDS0lKW/xAfLPb0w35JmPt2rr0KzTuIcQOb/gm/a7PhK3\n9tV5UYGMqrrsSC1NUX4c0eBIbPWOxdP1SyTNGO/dvkqiHc4ujQ95GMpPeowBB0Td\n5BSE6ONJb0pFKxAj0kVQCuFcDtkqsI1o4wGVEA7DAoM/DEbN+rfTAgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQU1sa1\nFt63kb3xh2yIwIUSgaE4LI4wHwYDVR0jBBgwFoAUjkYDIKFMYyESUEbKaaP2/M3v\nagUwDQYJKoZIhvcNAQELBQADggEBAHyrh1RGP/Poq98iif0ACJUDew1FgfR3WEc/\nt7lhKoakw0aa1THUFUOrDheeuA5nHc7/aORwlpurbc1XukzCunLFJudtbTaW9vT5\nbm5RA/CUly+fx+caifJcT8jij+e5XznIoevrN2H7CFDJIs71pfzY508LbrdFNgyg\ntRfxLSbIgqHOOiAbrkYrrJ42hdNmNWjwdweGXcmoxB283zvyjvHbu6NvD6XgyCbD\nBukfjGrpBeVNU7HedSoHLS01QkMcpviV5JTLVqei1vX8sp/zB1irEldhCzr43G2S\n+rnjQsnW1OQY8PaTgJ/qC0WjVP2aUG82mAjWqPqSbEzl4GpXwIM=\n-----END CERTIFICATE-----\n", "cert": "/etc/pki/tls/certs/quadlet_demo.crt", "cert_content": "-----BEGIN CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQCcOX80gFT5qGOVL6MvuL0DANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMDlj\nMzk3ZjMtNDgwNTRmOWEtODYzOTUyZmEtMzJmYjhiY2YwHhcNMjUwMTA0MTYzMzE0\nWhcNMjYwMTA0MTYzMzEzWjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPWnBR90E+G7BBNcO+8z7Eclxe8Cw2ZzXo\nuGXs8UFt6AvazgjOFEQkcyfOyU5v5RtcnjaSvzMwLpUVeyhkdYfbE9/DDdk8/ODK\n5li/pKeQRpN0Z4BA2FoecJ5Wwoc3h1W+c/Di8WENhtihieemrMPH/cFhwp8lubSE\nz2Gm0FM8vAjUwxfF6dvGDS0lKW/xAfLPb0w35JmPt2rr0KzTuIcQOb/gm/a7PhK3\n9tV5UYGMqrrsSC1NUX4c0eBIbPWOxdP1SyTNGO/dvkqiHc4ujQ95GMpPeowBB0Td\n5BSE6ONJb0pFKxAj0kVQCuFcDtkqsI1o4wGVEA7DAoM/DEbN+rfTAgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQU1sa1\nFt63kb3xh2yIwIUSgaE4LI4wHwYDVR0jBBgwFoAUjkYDIKFMYyESUEbKaaP2/M3v\nagUwDQYJKoZIhvcNAQELBQADggEBAHyrh1RGP/Poq98iif0ACJUDew1FgfR3WEc/\nt7lhKoakw0aa1THUFUOrDheeuA5nHc7/aORwlpurbc1XukzCunLFJudtbTaW9vT5\nbm5RA/CUly+fx+caifJcT8jij+e5XznIoevrN2H7CFDJIs71pfzY508LbrdFNgyg\ntRfxLSbIgqHOOiAbrkYrrJ42hdNmNWjwdweGXcmoxB283zvyjvHbu6NvD6XgyCbD\nBukfjGrpBeVNU7HedSoHLS01QkMcpviV5JTLVqei1vX8sp/zB1irEldhCzr43G2S\n+rnjQsnW1OQY8PaTgJ/qC0WjVP2aUG82mAjWqPqSbEzl4GpXwIM=\n-----END CERTIFICATE-----\n", "key": "/etc/pki/tls/private/quadlet_demo.key", "key_content": "-----BEGIN PRIVATE 
KEY-----\nMIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQDPWnBR90E+G7BB\nNcO+8z7Eclxe8Cw2ZzXouGXs8UFt6AvazgjOFEQkcyfOyU5v5RtcnjaSvzMwLpUV\neyhkdYfbE9/DDdk8/ODK5li/pKeQRpN0Z4BA2FoecJ5Wwoc3h1W+c/Di8WENhtih\nieemrMPH/cFhwp8lubSEz2Gm0FM8vAjUwxfF6dvGDS0lKW/xAfLPb0w35JmPt2rr\n0KzTuIcQOb/gm/a7PhK39tV5UYGMqrrsSC1NUX4c0eBIbPWOxdP1SyTNGO/dvkqi\nHc4ujQ95GMpPeowBB0Td5BSE6ONJb0pFKxAj0kVQCuFcDtkqsI1o4wGVEA7DAoM/\nDEbN+rfTAgMBAAECggEAMtzwaGrKaj3IgnZ5ycFJSE71MaSgc8flhWyWNAUKE7e0\nELwxlLuL65UD0sm2n+QHTjTRFjYojE0g7OAFDH94D9lULjibbUZqrdrLMldlcxKc\n4RTydPWdRZJChO261XgxJEK0eDbVo8fMI/P8CvqeG9S3Nw/SkiJ3kYx9W7oGcQMN\nokuqOSomy+lWpxeA2v27ZPrMWW9lx1ptu72j/t/LvzZ2WNKTBTRGg7z4ylDHkjVg\n1vFfxrLP2U7BAQ1d1FSq2WfUI5jhDZkNvmdZQyfGegse7XhsrCqOo54MinC/njBm\nq0OMOUMpoH+Dk89rS0ZKqa/cojFnC+gO506PU6AooQKBgQD30KgVtuASVEU9/pC6\n7k4HPRUwpqtmFoFFKdo63Vz5jMIXluT/DFjVO2kXmHr+3N8F+BkqIj3Dj1GTNXgg\nPQtuJaWI+40NMpAYZNxPYP/2XGMQIg2mjVLoRAAEF6z+i0VV4Z3bPqWvg1jS2zZ7\nTgrqH+aCQ01u95f3Dnq1bt/IdQKBgQDWM6qzgbAJrqxwk71VfQY+NTEAba8uZcfO\nZNjUfnt8tpTOU6MIYBzqjWDDTcybeyqhla9iE4gQOVC+99jjTj+qcaaKHqr1nG7L\nwj0bA0uXVN2WHYLmx7vVVju1lhtwUiH6kqcDInX7d1Ki/idt7zivMmWwRR2gyvwN\nL+iJX+C2JwKBgA6QyRBdq1XodhlquP8DurpaMMCnSzO45ug1yYnkMO7KAy53xAXp\np98AmvC0GaaZPNKQ/omncZHo/pZ0chcS/3T96XjfsC6yZh3I79oKjYDhdUEhf0Pm\n0pSsSoPzEEv1y6F8nVahnANw6Oubztv4flO8hEPryMVY8CmpRDUdyzFxAoGAFOUb\nXMFk47znU2AGEyXMCxtzOUB520PQduC9ihUAhMzmvS3JdJKzn2nFK6gWMoCUSwjn\n/+A54KNGripNPPcwHLdTcJvHP4hQAo5jGZR7tjpwFymv0EVhnFD6KB+Jc65/ZAsU\nQakF607bf8zdGctBM03uP4CugDXs2nh27ZetuvcCgYBmIbcPNyiWwauch92+oGcD\n2OcNmBxDMrnclAjlpEZahWc8TUIUHJ0zY8AhR5fg4jUmCmCnFEU25xxfmi+PovFI\nVf+UVl3Bh1O2ezznFT5k4wzzsFrkp/FhejizAdVuaLdojs5EsZG3O5WCViSp4ZxL\njHYKl3Ub2DfILdivMi1fWA==\n-----END PRIVATE KEY-----\n" } } }, "changed": false } TASK [fedora.linux_system_roles.certificate : Stop tracking certificates] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:176 Saturday 04 January 2025 11:33:15 -0500 (0:00:00.073) 0:00:11.911 ****** ok: [managed-node2] => (item={'cert': '/etc/pki/tls/certs/quadlet_demo.crt', 'cert_content': '-----BEGIN CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQCcOX80gFT5qGOVL6MvuL0DANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMDlj\nMzk3ZjMtNDgwNTRmOWEtODYzOTUyZmEtMzJmYjhiY2YwHhcNMjUwMTA0MTYzMzE0\nWhcNMjYwMTA0MTYzMzEzWjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPWnBR90E+G7BBNcO+8z7Eclxe8Cw2ZzXo\nuGXs8UFt6AvazgjOFEQkcyfOyU5v5RtcnjaSvzMwLpUVeyhkdYfbE9/DDdk8/ODK\n5li/pKeQRpN0Z4BA2FoecJ5Wwoc3h1W+c/Di8WENhtihieemrMPH/cFhwp8lubSE\nz2Gm0FM8vAjUwxfF6dvGDS0lKW/xAfLPb0w35JmPt2rr0KzTuIcQOb/gm/a7PhK3\n9tV5UYGMqrrsSC1NUX4c0eBIbPWOxdP1SyTNGO/dvkqiHc4ujQ95GMpPeowBB0Td\n5BSE6ONJb0pFKxAj0kVQCuFcDtkqsI1o4wGVEA7DAoM/DEbN+rfTAgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQU1sa1\nFt63kb3xh2yIwIUSgaE4LI4wHwYDVR0jBBgwFoAUjkYDIKFMYyESUEbKaaP2/M3v\nagUwDQYJKoZIhvcNAQELBQADggEBAHyrh1RGP/Poq98iif0ACJUDew1FgfR3WEc/\nt7lhKoakw0aa1THUFUOrDheeuA5nHc7/aORwlpurbc1XukzCunLFJudtbTaW9vT5\nbm5RA/CUly+fx+caifJcT8jij+e5XznIoevrN2H7CFDJIs71pfzY508LbrdFNgyg\ntRfxLSbIgqHOOiAbrkYrrJ42hdNmNWjwdweGXcmoxB283zvyjvHbu6NvD6XgyCbD\nBukfjGrpBeVNU7HedSoHLS01QkMcpviV5JTLVqei1vX8sp/zB1irEldhCzr43G2S\n+rnjQsnW1OQY8PaTgJ/qC0WjVP2aUG82mAjWqPqSbEzl4GpXwIM=\n-----END CERTIFICATE-----\n', 'key': '/etc/pki/tls/private/quadlet_demo.key', 'key_content': '-----BEGIN PRIVATE 
KEY-----\nMIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQDPWnBR90E+G7BB\nNcO+8z7Eclxe8Cw2ZzXouGXs8UFt6AvazgjOFEQkcyfOyU5v5RtcnjaSvzMwLpUV\neyhkdYfbE9/DDdk8/ODK5li/pKeQRpN0Z4BA2FoecJ5Wwoc3h1W+c/Di8WENhtih\nieemrMPH/cFhwp8lubSEz2Gm0FM8vAjUwxfF6dvGDS0lKW/xAfLPb0w35JmPt2rr\n0KzTuIcQOb/gm/a7PhK39tV5UYGMqrrsSC1NUX4c0eBIbPWOxdP1SyTNGO/dvkqi\nHc4ujQ95GMpPeowBB0Td5BSE6ONJb0pFKxAj0kVQCuFcDtkqsI1o4wGVEA7DAoM/\nDEbN+rfTAgMBAAECggEAMtzwaGrKaj3IgnZ5ycFJSE71MaSgc8flhWyWNAUKE7e0\nELwxlLuL65UD0sm2n+QHTjTRFjYojE0g7OAFDH94D9lULjibbUZqrdrLMldlcxKc\n4RTydPWdRZJChO261XgxJEK0eDbVo8fMI/P8CvqeG9S3Nw/SkiJ3kYx9W7oGcQMN\nokuqOSomy+lWpxeA2v27ZPrMWW9lx1ptu72j/t/LvzZ2WNKTBTRGg7z4ylDHkjVg\n1vFfxrLP2U7BAQ1d1FSq2WfUI5jhDZkNvmdZQyfGegse7XhsrCqOo54MinC/njBm\nq0OMOUMpoH+Dk89rS0ZKqa/cojFnC+gO506PU6AooQKBgQD30KgVtuASVEU9/pC6\n7k4HPRUwpqtmFoFFKdo63Vz5jMIXluT/DFjVO2kXmHr+3N8F+BkqIj3Dj1GTNXgg\nPQtuJaWI+40NMpAYZNxPYP/2XGMQIg2mjVLoRAAEF6z+i0VV4Z3bPqWvg1jS2zZ7\nTgrqH+aCQ01u95f3Dnq1bt/IdQKBgQDWM6qzgbAJrqxwk71VfQY+NTEAba8uZcfO\nZNjUfnt8tpTOU6MIYBzqjWDDTcybeyqhla9iE4gQOVC+99jjTj+qcaaKHqr1nG7L\nwj0bA0uXVN2WHYLmx7vVVju1lhtwUiH6kqcDInX7d1Ki/idt7zivMmWwRR2gyvwN\nL+iJX+C2JwKBgA6QyRBdq1XodhlquP8DurpaMMCnSzO45ug1yYnkMO7KAy53xAXp\np98AmvC0GaaZPNKQ/omncZHo/pZ0chcS/3T96XjfsC6yZh3I79oKjYDhdUEhf0Pm\n0pSsSoPzEEv1y6F8nVahnANw6Oubztv4flO8hEPryMVY8CmpRDUdyzFxAoGAFOUb\nXMFk47znU2AGEyXMCxtzOUB520PQduC9ihUAhMzmvS3JdJKzn2nFK6gWMoCUSwjn\n/+A54KNGripNPPcwHLdTcJvHP4hQAo5jGZR7tjpwFymv0EVhnFD6KB+Jc65/ZAsU\nQakF607bf8zdGctBM03uP4CugDXs2nh27ZetuvcCgYBmIbcPNyiWwauch92+oGcD\n2OcNmBxDMrnclAjlpEZahWc8TUIUHJ0zY8AhR5fg4jUmCmCnFEU25xxfmi+PovFI\nVf+UVl3Bh1O2ezznFT5k4wzzsFrkp/FhejizAdVuaLdojs5EsZG3O5WCViSp4ZxL\njHYKl3Ub2DfILdivMi1fWA==\n-----END PRIVATE KEY-----\n', 'ca': '/etc/pki/tls/certs/quadlet_demo.crt', 'ca_content': '-----BEGIN CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQCcOX80gFT5qGOVL6MvuL0DANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMDlj\nMzk3ZjMtNDgwNTRmOWEtODYzOTUyZmEtMzJmYjhiY2YwHhcNMjUwMTA0MTYzMzE0\nWhcNMjYwMTA0MTYzMzEzWjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPWnBR90E+G7BBNcO+8z7Eclxe8Cw2ZzXo\nuGXs8UFt6AvazgjOFEQkcyfOyU5v5RtcnjaSvzMwLpUVeyhkdYfbE9/DDdk8/ODK\n5li/pKeQRpN0Z4BA2FoecJ5Wwoc3h1W+c/Di8WENhtihieemrMPH/cFhwp8lubSE\nz2Gm0FM8vAjUwxfF6dvGDS0lKW/xAfLPb0w35JmPt2rr0KzTuIcQOb/gm/a7PhK3\n9tV5UYGMqrrsSC1NUX4c0eBIbPWOxdP1SyTNGO/dvkqiHc4ujQ95GMpPeowBB0Td\n5BSE6ONJb0pFKxAj0kVQCuFcDtkqsI1o4wGVEA7DAoM/DEbN+rfTAgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQU1sa1\nFt63kb3xh2yIwIUSgaE4LI4wHwYDVR0jBBgwFoAUjkYDIKFMYyESUEbKaaP2/M3v\nagUwDQYJKoZIhvcNAQELBQADggEBAHyrh1RGP/Poq98iif0ACJUDew1FgfR3WEc/\nt7lhKoakw0aa1THUFUOrDheeuA5nHc7/aORwlpurbc1XukzCunLFJudtbTaW9vT5\nbm5RA/CUly+fx+caifJcT8jij+e5XznIoevrN2H7CFDJIs71pfzY508LbrdFNgyg\ntRfxLSbIgqHOOiAbrkYrrJ42hdNmNWjwdweGXcmoxB283zvyjvHbu6NvD6XgyCbD\nBukfjGrpBeVNU7HedSoHLS01QkMcpviV5JTLVqei1vX8sp/zB1irEldhCzr43G2S\n+rnjQsnW1OQY8PaTgJ/qC0WjVP2aUG82mAjWqPqSbEzl4GpXwIM=\n-----END CERTIFICATE-----\n'}) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "getcert", "stop-tracking", "-f", "/etc/pki/tls/certs/quadlet_demo.crt" ], "delta": "0:00:00.026061", "end": "2025-01-04 11:33:16.273090", "item": { "ca": "/etc/pki/tls/certs/quadlet_demo.crt", "ca_content": "-----BEGIN 
CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQCcOX80gFT5qGOVL6MvuL0DANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMDlj\nMzk3ZjMtNDgwNTRmOWEtODYzOTUyZmEtMzJmYjhiY2YwHhcNMjUwMTA0MTYzMzE0\nWhcNMjYwMTA0MTYzMzEzWjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPWnBR90E+G7BBNcO+8z7Eclxe8Cw2ZzXo\nuGXs8UFt6AvazgjOFEQkcyfOyU5v5RtcnjaSvzMwLpUVeyhkdYfbE9/DDdk8/ODK\n5li/pKeQRpN0Z4BA2FoecJ5Wwoc3h1W+c/Di8WENhtihieemrMPH/cFhwp8lubSE\nz2Gm0FM8vAjUwxfF6dvGDS0lKW/xAfLPb0w35JmPt2rr0KzTuIcQOb/gm/a7PhK3\n9tV5UYGMqrrsSC1NUX4c0eBIbPWOxdP1SyTNGO/dvkqiHc4ujQ95GMpPeowBB0Td\n5BSE6ONJb0pFKxAj0kVQCuFcDtkqsI1o4wGVEA7DAoM/DEbN+rfTAgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQU1sa1\nFt63kb3xh2yIwIUSgaE4LI4wHwYDVR0jBBgwFoAUjkYDIKFMYyESUEbKaaP2/M3v\nagUwDQYJKoZIhvcNAQELBQADggEBAHyrh1RGP/Poq98iif0ACJUDew1FgfR3WEc/\nt7lhKoakw0aa1THUFUOrDheeuA5nHc7/aORwlpurbc1XukzCunLFJudtbTaW9vT5\nbm5RA/CUly+fx+caifJcT8jij+e5XznIoevrN2H7CFDJIs71pfzY508LbrdFNgyg\ntRfxLSbIgqHOOiAbrkYrrJ42hdNmNWjwdweGXcmoxB283zvyjvHbu6NvD6XgyCbD\nBukfjGrpBeVNU7HedSoHLS01QkMcpviV5JTLVqei1vX8sp/zB1irEldhCzr43G2S\n+rnjQsnW1OQY8PaTgJ/qC0WjVP2aUG82mAjWqPqSbEzl4GpXwIM=\n-----END CERTIFICATE-----\n", "cert": "/etc/pki/tls/certs/quadlet_demo.crt", "cert_content": "-----BEGIN CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQCcOX80gFT5qGOVL6MvuL0DANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMDlj\nMzk3ZjMtNDgwNTRmOWEtODYzOTUyZmEtMzJmYjhiY2YwHhcNMjUwMTA0MTYzMzE0\nWhcNMjYwMTA0MTYzMzEzWjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPWnBR90E+G7BBNcO+8z7Eclxe8Cw2ZzXo\nuGXs8UFt6AvazgjOFEQkcyfOyU5v5RtcnjaSvzMwLpUVeyhkdYfbE9/DDdk8/ODK\n5li/pKeQRpN0Z4BA2FoecJ5Wwoc3h1W+c/Di8WENhtihieemrMPH/cFhwp8lubSE\nz2Gm0FM8vAjUwxfF6dvGDS0lKW/xAfLPb0w35JmPt2rr0KzTuIcQOb/gm/a7PhK3\n9tV5UYGMqrrsSC1NUX4c0eBIbPWOxdP1SyTNGO/dvkqiHc4ujQ95GMpPeowBB0Td\n5BSE6ONJb0pFKxAj0kVQCuFcDtkqsI1o4wGVEA7DAoM/DEbN+rfTAgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQU1sa1\nFt63kb3xh2yIwIUSgaE4LI4wHwYDVR0jBBgwFoAUjkYDIKFMYyESUEbKaaP2/M3v\nagUwDQYJKoZIhvcNAQELBQADggEBAHyrh1RGP/Poq98iif0ACJUDew1FgfR3WEc/\nt7lhKoakw0aa1THUFUOrDheeuA5nHc7/aORwlpurbc1XukzCunLFJudtbTaW9vT5\nbm5RA/CUly+fx+caifJcT8jij+e5XznIoevrN2H7CFDJIs71pfzY508LbrdFNgyg\ntRfxLSbIgqHOOiAbrkYrrJ42hdNmNWjwdweGXcmoxB283zvyjvHbu6NvD6XgyCbD\nBukfjGrpBeVNU7HedSoHLS01QkMcpviV5JTLVqei1vX8sp/zB1irEldhCzr43G2S\n+rnjQsnW1OQY8PaTgJ/qC0WjVP2aUG82mAjWqPqSbEzl4GpXwIM=\n-----END CERTIFICATE-----\n", "key": "/etc/pki/tls/private/quadlet_demo.key", "key_content": "-----BEGIN PRIVATE 
KEY-----\nMIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQDPWnBR90E+G7BB\nNcO+8z7Eclxe8Cw2ZzXouGXs8UFt6AvazgjOFEQkcyfOyU5v5RtcnjaSvzMwLpUV\neyhkdYfbE9/DDdk8/ODK5li/pKeQRpN0Z4BA2FoecJ5Wwoc3h1W+c/Di8WENhtih\nieemrMPH/cFhwp8lubSEz2Gm0FM8vAjUwxfF6dvGDS0lKW/xAfLPb0w35JmPt2rr\n0KzTuIcQOb/gm/a7PhK39tV5UYGMqrrsSC1NUX4c0eBIbPWOxdP1SyTNGO/dvkqi\nHc4ujQ95GMpPeowBB0Td5BSE6ONJb0pFKxAj0kVQCuFcDtkqsI1o4wGVEA7DAoM/\nDEbN+rfTAgMBAAECggEAMtzwaGrKaj3IgnZ5ycFJSE71MaSgc8flhWyWNAUKE7e0\nELwxlLuL65UD0sm2n+QHTjTRFjYojE0g7OAFDH94D9lULjibbUZqrdrLMldlcxKc\n4RTydPWdRZJChO261XgxJEK0eDbVo8fMI/P8CvqeG9S3Nw/SkiJ3kYx9W7oGcQMN\nokuqOSomy+lWpxeA2v27ZPrMWW9lx1ptu72j/t/LvzZ2WNKTBTRGg7z4ylDHkjVg\n1vFfxrLP2U7BAQ1d1FSq2WfUI5jhDZkNvmdZQyfGegse7XhsrCqOo54MinC/njBm\nq0OMOUMpoH+Dk89rS0ZKqa/cojFnC+gO506PU6AooQKBgQD30KgVtuASVEU9/pC6\n7k4HPRUwpqtmFoFFKdo63Vz5jMIXluT/DFjVO2kXmHr+3N8F+BkqIj3Dj1GTNXgg\nPQtuJaWI+40NMpAYZNxPYP/2XGMQIg2mjVLoRAAEF6z+i0VV4Z3bPqWvg1jS2zZ7\nTgrqH+aCQ01u95f3Dnq1bt/IdQKBgQDWM6qzgbAJrqxwk71VfQY+NTEAba8uZcfO\nZNjUfnt8tpTOU6MIYBzqjWDDTcybeyqhla9iE4gQOVC+99jjTj+qcaaKHqr1nG7L\nwj0bA0uXVN2WHYLmx7vVVju1lhtwUiH6kqcDInX7d1Ki/idt7zivMmWwRR2gyvwN\nL+iJX+C2JwKBgA6QyRBdq1XodhlquP8DurpaMMCnSzO45ug1yYnkMO7KAy53xAXp\np98AmvC0GaaZPNKQ/omncZHo/pZ0chcS/3T96XjfsC6yZh3I79oKjYDhdUEhf0Pm\n0pSsSoPzEEv1y6F8nVahnANw6Oubztv4flO8hEPryMVY8CmpRDUdyzFxAoGAFOUb\nXMFk47znU2AGEyXMCxtzOUB520PQduC9ihUAhMzmvS3JdJKzn2nFK6gWMoCUSwjn\n/+A54KNGripNPPcwHLdTcJvHP4hQAo5jGZR7tjpwFymv0EVhnFD6KB+Jc65/ZAsU\nQakF607bf8zdGctBM03uP4CugDXs2nh27ZetuvcCgYBmIbcPNyiWwauch92+oGcD\n2OcNmBxDMrnclAjlpEZahWc8TUIUHJ0zY8AhR5fg4jUmCmCnFEU25xxfmi+PovFI\nVf+UVl3Bh1O2ezznFT5k4wzzsFrkp/FhejizAdVuaLdojs5EsZG3O5WCViSp4ZxL\njHYKl3Ub2DfILdivMi1fWA==\n-----END PRIVATE KEY-----\n" }, "rc": 0, "start": "2025-01-04 11:33:16.247029" } STDOUT: Request "20250104163314" removed. 
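The stop-tracking step just logged is the role's test-mode cleanup: the certificate and key contents have already been captured into the certificate_test_certs fact above, so the certmonger tracking request is dropped here and the files themselves are removed in the next task. A rough Ansible equivalent of this cleanup, with the command and paths taken from the logged output (task names and layout are illustrative, not the role's actual task files):

- name: Stop tracking certificates
  command: getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt

- name: Remove certificate and key files
  file:
    path: "{{ item }}"
    state: absent
  loop:
    - /etc/pki/tls/certs/quadlet_demo.crt
    - /etc/pki/tls/private/quadlet_demo.key
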
TASK [fedora.linux_system_roles.certificate : Remove files] ******************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:181 Saturday 04 January 2025 11:33:16 -0500 (0:00:00.652) 0:00:12.564 ****** changed: [managed-node2] => (item=/etc/pki/tls/certs/quadlet_demo.crt) => { "ansible_loop_var": "item", "changed": true, "item": "/etc/pki/tls/certs/quadlet_demo.crt", "path": "/etc/pki/tls/certs/quadlet_demo.crt", "state": "absent" } changed: [managed-node2] => (item=/etc/pki/tls/private/quadlet_demo.key) => { "ansible_loop_var": "item", "changed": true, "item": "/etc/pki/tls/private/quadlet_demo.key", "path": "/etc/pki/tls/private/quadlet_demo.key", "state": "absent" } ok: [managed-node2] => (item=/etc/pki/tls/certs/quadlet_demo.crt) => { "ansible_loop_var": "item", "changed": false, "item": "/etc/pki/tls/certs/quadlet_demo.crt", "path": "/etc/pki/tls/certs/quadlet_demo.crt", "state": "absent" } TASK [Run the role] ************************************************************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:62 Saturday 04 January 2025 11:33:17 -0500 (0:00:01.091) 0:00:13.655 ****** included: fedora.linux_system_roles.podman for managed-node2 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 04 January 2025 11:33:17 -0500 (0:00:00.132) 0:00:13.787 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 04 January 2025 11:33:17 -0500 (0:00:00.063) 0:00:13.850 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 04 January 2025 11:33:17 -0500 (0:00:00.065) 0:00:13.916 ****** ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 04 January 2025 11:33:18 -0500 (0:00:00.414) 0:00:14.331 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Saturday 04 January 2025 11:33:18 -0500 (0:00:00.032) 0:00:14.363 ****** ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Saturday 04 January 2025 11:33:18 -0500 (0:00:00.371) 0:00:14.734 ****** ok: [managed-node2] => { "ansible_facts": { 
"__podman_is_transactional": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 04 January 2025 11:33:18 -0500 (0:00:00.045) 0:00:14.780 ****** ok: [managed-node2] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 04 January 2025 11:33:18 -0500 (0:00:00.082) 0:00:14.862 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 04 January 2025 11:33:19 -0500 (0:00:01.151) 0:00:16.013 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 04 January 2025 11:33:19 -0500 (0:00:00.055) 0:00:16.068 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 04 January 2025 11:33:19 -0500 (0:00:00.049) 0:00:16.118 ****** skipping: [managed-node2] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 04 January 2025 11:33:19 -0500 (0:00:00.044) 0:00:16.163 ****** skipping: [managed-node2] => { "changed": false, "false_condition": 
"__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 04 January 2025 11:33:19 -0500 (0:00:00.042) 0:00:16.206 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 04 January 2025 11:33:20 -0500 (0:00:00.070) 0:00:16.276 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.026904", "end": "2025-01-04 11:33:20.394123", "rc": 0, "start": "2025-01-04 11:33:20.367219" } STDOUT: podman version 5.3.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 04 January 2025 11:33:20 -0500 (0:00:00.413) 0:00:16.690 ****** ok: [managed-node2] => { "ansible_facts": { "podman_version": "5.3.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 04 January 2025 11:33:20 -0500 (0:00:00.030) 0:00:16.721 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 04 January 2025 11:33:20 -0500 (0:00:00.031) 0:00:16.752 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Saturday 04 January 2025 11:33:20 -0500 (0:00:00.137) 0:00:16.890 ****** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 04 January 2025 11:33:20 -0500 (0:00:00.177) 0:00:17.067 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: 
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Saturday 04 January 2025 11:33:20 -0500 (0:00:00.049) 0:00:17.117 ****** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 04 January 2025 11:33:20 -0500 (0:00:00.048) 0:00:17.165 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 04 January 2025 11:33:20 -0500 (0:00:00.056) 0:00:17.222 ****** ok: [managed-node2] => { "ansible_facts": { "getent_passwd": { "root": [ "x", "0", "0", "Super User", "/root", "/bin/bash" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 04 January 2025 11:33:21 -0500 (0:00:00.543) 0:00:17.765 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 04 January 2025 11:33:21 -0500 (0:00:00.058) 0:00:17.823 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 04 January 2025 11:33:21 -0500 (0:00:00.077) 0:00:17.900 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736008187.927243, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736008167.5443802, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3978348631", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 04 January 2025 11:33:22 -0500 (0:00:00.454) 0:00:18.355 ****** skipping: 
[managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 04 January 2025 11:33:22 -0500 (0:00:00.074) 0:00:18.430 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 04 January 2025 11:33:22 -0500 (0:00:00.054) 0:00:18.484 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 04 January 2025 11:33:22 -0500 (0:00:00.046) 0:00:18.531 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 04 January 2025 11:33:22 -0500 (0:00:00.048) 0:00:18.579 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 04 January 2025 11:33:22 -0500 (0:00:00.045) 0:00:18.625 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 04 January 2025 11:33:22 -0500 (0:00:00.048) 0:00:18.673 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 04 January 2025 11:33:22 -0500 (0:00:00.045) 0:00:18.718 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 04 January 2025 11:33:22 -0500 (0:00:00.048) 0:00:18.767 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_container_conf_file": 
"/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Saturday 04 January 2025 11:33:22 -0500 (0:00:00.089) 0:00:18.856 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 04 January 2025 11:33:22 -0500 (0:00:00.095) 0:00:18.952 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 04 January 2025 11:33:22 -0500 (0:00:00.081) 0:00:19.033 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Saturday 04 January 2025 11:33:22 -0500 (0:00:00.046) 0:00:19.079 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 04 January 2025 11:33:22 -0500 (0:00:00.096) 0:00:19.176 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 04 January 2025 11:33:22 -0500 (0:00:00.049) 0:00:19.226 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Saturday 04 January 2025 11:33:23 -0500 (0:00:00.044) 0:00:19.270 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Saturday 04 January 2025 11:33:23 -0500 (0:00:00.089) 0:00:19.360 ****** skipping: 
[managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Saturday 04 January 2025 11:33:23 -0500 (0:00:00.049) 0:00:19.409 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Saturday 04 January 2025 11:33:23 -0500 (0:00:00.069) 0:00:19.479 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Saturday 04 January 2025 11:33:23 -0500 (0:00:00.087) 0:00:19.567 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Saturday 04 January 2025 11:33:23 -0500 (0:00:00.035) 0:00:19.602 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Saturday 04 January 2025 11:33:23 -0500 (0:00:00.035) 0:00:19.637 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Saturday 04 January 2025 11:33:23 -0500 (0:00:00.029) 0:00:19.666 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Saturday 04 January 2025 11:33:23 -0500 (0:00:00.030) 0:00:19.697 ****** included: fedora.linux_system_roles.firewall for managed-node2 TASK [fedora.linux_system_roles.firewall : Setup firewalld] ******************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2 Saturday 04 January 2025 11:33:23 -0500 (0:00:00.122) 0:00:19.819 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node2 TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] *** task path: 
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2 Saturday 04 January 2025 11:33:23 -0500 (0:00:00.090) 0:00:19.909 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if system is ostree] ********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10 Saturday 04 January 2025 11:33:23 -0500 (0:00:00.109) 0:00:20.019 ****** ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15 Saturday 04 January 2025 11:33:24 -0500 (0:00:00.438) 0:00:20.458 ****** ok: [managed-node2] => { "ansible_facts": { "__firewall_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22 Saturday 04 January 2025 11:33:24 -0500 (0:00:00.054) 0:00:20.513 ****** ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27 Saturday 04 January 2025 11:33:24 -0500 (0:00:00.380) 0:00:20.893 ****** ok: [managed-node2] => { "ansible_facts": { "__firewall_is_transactional": false }, "changed": false } TASK [fedora.linux_system_roles.firewall : Install firewalld] ****************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31 Saturday 04 January 2025 11:33:24 -0500 (0:00:00.053) 0:00:20.946 ****** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: firewalld TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:43 Saturday 04 January 2025 11:33:25 -0500 (0:00:00.737) 0:00:21.684 ****** skipping: [managed-node2] => { "false_condition": "__firewall_is_transactional | d(false)" } TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:48 Saturday 04 January 2025 11:33:25 -0500 (0:00:00.055) 0:00:21.739 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:53 Saturday 04 January 2025 11:33:25 -0500 (0:00:00.048) 0:00:21.788 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Collect service facts] ************** task path: 
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5 Saturday 04 January 2025 11:33:25 -0500 (0:00:00.062) 0:00:21.851 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:9 Saturday 04 January 2025 11:33:25 -0500 (0:00:00.075) 0:00:21.927 ****** skipping: [managed-node2] => (item=nftables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "nftables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=iptables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "iptables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=ufw) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "ufw", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] *********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22 Saturday 04 January 2025 11:33:25 -0500 (0:00:00.074) 0:00:22.001 ****** ok: [managed-node2] => { "changed": false, "name": "firewalld", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2025-01-04 11:29:53 EST", "ActiveEnterTimestampMonotonic": "332301083", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "sysinit.target dbus-broker.service system.slice polkit.service basic.target dbus.socket", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-01-04 11:29:52 EST", "AssertTimestampMonotonic": "332047601", "Before": "shutdown.target network-pre.target multi-user.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "508281000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-01-04 11:29:52 EST", "ConditionTimestampMonotonic": "332047598", "ConfigurationDirectoryMode": "0755", "Conflicts": 
"ebtables.service ip6tables.service iptables.service ipset.service shutdown.target", "ControlGroup": "/system.slice/firewalld.service", "ControlGroupId": "4677", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3698245632", "EffectiveMemoryMax": "3698245632", "EffectiveTasksMax": "22349", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Sat 2025-01-04 11:29:52 EST", "ExecMainHandoffTimestampMonotonic": "332075596", "ExecMainPID": "11162", "ExecMainStartTimestamp": "Sat 2025-01-04 11:29:52 EST", "ExecMainStartTimestampMonotonic": "332050215", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-01-04 11:29:52 EST", "InactiveExitTimestampMonotonic": "332050697", "InvocationID": "c67f28ddb09e45a3b56b475ce0c9fcda", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": 
"524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "11162", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3112353792", "MemoryCurrent": "35246080", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "35508224", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectHome": "yes", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice dbus.socket sysinit.target", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", 
"StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-01-04 11:33:13 EST", "StateChangeTimestampMonotonic": "532468820", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28 Saturday 04 January 2025 11:33:26 -0500 (0:00:00.622) 0:00:22.624 ****** ok: [managed-node2] => { "changed": false, "enabled": true, "name": "firewalld", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2025-01-04 11:29:53 EST", "ActiveEnterTimestampMonotonic": "332301083", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "sysinit.target dbus-broker.service system.slice polkit.service basic.target dbus.socket", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-01-04 11:29:52 EST", "AssertTimestampMonotonic": "332047601", "Before": "shutdown.target network-pre.target multi-user.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "508281000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-01-04 11:29:52 EST", "ConditionTimestampMonotonic": "332047598", "ConfigurationDirectoryMode": "0755", "Conflicts": "ebtables.service ip6tables.service iptables.service 
ipset.service shutdown.target", "ControlGroup": "/system.slice/firewalld.service", "ControlGroupId": "4677", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3698245632", "EffectiveMemoryMax": "3698245632", "EffectiveTasksMax": "22349", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Sat 2025-01-04 11:29:52 EST", "ExecMainHandoffTimestampMonotonic": "332075596", "ExecMainPID": "11162", "ExecMainStartTimestamp": "Sat 2025-01-04 11:29:52 EST", "ExecMainStartTimestampMonotonic": "332050215", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-01-04 11:29:52 EST", "InactiveExitTimestampMonotonic": "332050697", "InvocationID": "c67f28ddb09e45a3b56b475ce0c9fcda", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": 
"13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "11162", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3111018496", "MemoryCurrent": "35246080", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "35508224", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectHome": "yes", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice dbus.socket sysinit.target", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": 
"infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-01-04 11:33:13 EST", "StateChangeTimestampMonotonic": "532468820", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:34 Saturday 04 January 2025 11:33:26 -0500 (0:00:00.588) 0:00:23.212 ****** ok: [managed-node2] => { "ansible_facts": { "__firewall_previous_replaced": false, "__firewall_python_cmd": "/usr/bin/python3.12", "__firewall_report_changed": true }, "changed": false } TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:43 Saturday 04 January 2025 11:33:27 -0500 (0:00:00.072) 0:00:23.285 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:55 Saturday 04 January 2025 11:33:27 -0500 (0:00:00.043) 0:00:23.328 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Configure firewall] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71 Saturday 04 January 2025 11:33:27 -0500 (0:00:00.036) 0:00:23.365 ****** changed: [managed-node2] => (item={'port': '8000/tcp', 'state': 'enabled'}) => { "__firewall_changed": true, "ansible_loop_var": "item", "changed": true, "item": { "port": "8000/tcp", "state": "enabled" } } changed: [managed-node2] => (item={'port': '9000/tcp', 'state': 'enabled'}) => { "__firewall_changed": true, "ansible_loop_var": "item", "changed": true, "item": { "port": "9000/tcp", "state": "enabled" } } TASK [fedora.linux_system_roles.firewall : Gather firewall config information] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:120 Saturday 04 January 2025 11:33:28 -0500 (0:00:01.119) 0:00:24.485 ****** skipping: [managed-node2] => (item={'port': 
'8000/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall | length == 1", "item": { "port": "8000/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item={'port': '9000/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall | length == 1", "item": { "port": "9000/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130 Saturday 04 January 2025 11:33:28 -0500 (0:00:00.107) 0:00:24.592 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall | length == 1", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:139 Saturday 04 January 2025 11:33:28 -0500 (0:00:00.063) 0:00:24.656 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:144 Saturday 04 January 2025 11:33:28 -0500 (0:00:00.059) 0:00:24.715 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:153 Saturday 04 January 2025 11:33:28 -0500 (0:00:00.050) 0:00:24.766 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Calculate what has changed] ********* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:163 Saturday 04 January 2025 11:33:28 -0500 (0:00:00.047) 0:00:24.814 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Show diffs] ************************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:169 Saturday 04 January 2025 11:33:28 -0500 (0:00:00.119) 0:00:24.933 ****** skipping: [managed-node2] => { "false_condition": "__firewall_previous_replaced | bool" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Saturday 04 January 2025 11:33:28 -0500 (0:00:00.152) 0:00:25.085 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** 
task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Saturday 04 January 2025 11:33:28 -0500 (0:00:00.072) 0:00:25.158 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Saturday 04 January 2025 11:33:29 -0500 (0:00:00.092) 0:00:25.251 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Saturday 04 January 2025 11:33:29 -0500 (0:00:00.063) 0:00:25.314 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Saturday 04 January 2025 11:33:29 -0500 (0:00:00.061) 0:00:25.376 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 04 January 2025 11:33:29 -0500 (0:00:00.244) 0:00:25.620 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 04 January 2025 11:33:29 -0500 (0:00:00.056) 0:00:25.677 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:13 Saturday 04 January 2025 11:33:29 -0500 (0:00:00.063) 0:00:25.740 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 04 January 2025 11:33:29 -0500 (0:00:00.121) 0:00:25.861 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to 
cancel linger] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 04 January 2025 11:33:29 -0500 (0:00:00.061) 0:00:25.923 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 04 January 2025 11:33:29 -0500 (0:00:00.043) 0:00:25.966 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:18 Saturday 04 January 2025 11:33:29 -0500 (0:00:00.042) 0:00:26.009 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:34 Saturday 04 January 2025 11:33:29 -0500 (0:00:00.035) 0:00:26.044 ****** [WARNING]: Using a variable for a task's 'args' is unsafe in some situations (see https://docs.ansible.com/ansible/devel/reference_appendices/faq.html#argsplat- unsafe) changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 04 January 2025 11:33:30 -0500 (0:00:00.566) 0:00:26.611 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 04 January 2025 11:33:30 -0500 (0:00:00.039) 0:00:26.651 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:13 Saturday 04 January 2025 11:33:30 -0500 (0:00:00.039) 0:00:26.690 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 04 January 2025 11:33:30 -0500 (0:00:00.086) 0:00:26.777 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: 
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 04 January 2025 11:33:30 -0500 (0:00:00.047) 0:00:26.824 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 04 January 2025 11:33:30 -0500 (0:00:00.040) 0:00:26.865 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:18 Saturday 04 January 2025 11:33:30 -0500 (0:00:00.032) 0:00:26.897 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:34 Saturday 04 January 2025 11:33:30 -0500 (0:00:00.029) 0:00:26.926 ****** changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 04 January 2025 11:33:31 -0500 (0:00:00.523) 0:00:27.450 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 04 January 2025 11:33:31 -0500 (0:00:00.058) 0:00:27.508 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:13 Saturday 04 January 2025 11:33:31 -0500 (0:00:00.063) 0:00:27.572 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 04 January 2025 11:33:31 -0500 (0:00:00.123) 0:00:27.696 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 04 January 2025 11:33:31 -0500 (0:00:00.062) 0:00:27.758 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", 
"skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 04 January 2025 11:33:31 -0500 (0:00:00.054) 0:00:27.813 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:18 Saturday 04 January 2025 11:33:31 -0500 (0:00:00.072) 0:00:27.885 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:34 Saturday 04 January 2025 11:33:31 -0500 (0:00:00.060) 0:00:27.946 ****** changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182 Saturday 04 January 2025 11:33:32 -0500 (0:00:00.572) 0:00:28.518 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189 Saturday 04 January 2025 11:33:32 -0500 (0:00:00.025) 0:00:28.544 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 04 January 2025 11:33:32 -0500 (0:00:00.146) 0:00:28.690 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "quadlet-demo.network", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Network]\nSubnet=192.168.30.0/24\nGateway=192.168.30.1\nLabel=app=wordpress", 
"__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 04 January 2025 11:33:32 -0500 (0:00:00.070) 0:00:28.761 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 04 January 2025 11:33:32 -0500 (0:00:00.053) 0:00:28.815 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 04 January 2025 11:33:32 -0500 (0:00:00.120) 0:00:28.935 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo", "__podman_quadlet_type": "network", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 04 January 2025 11:33:32 -0500 (0:00:00.074) 0:00:29.010 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 04 January 2025 11:33:32 -0500 (0:00:00.062) 0:00:29.072 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 04 January 2025 11:33:32 -0500 (0:00:00.067) 0:00:29.140 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 04 January 2025 11:33:32 -0500 (0:00:00.036) 0:00:29.176 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 04 January 2025 11:33:32 -0500 (0:00:00.043) 0:00:29.219 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736008187.927243, "attr_flags": "", "attributes": [], "block_size": 
4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736008167.5443802, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3978348631", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 04 January 2025 11:33:33 -0500 (0:00:00.365) 0:00:29.585 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 04 January 2025 11:33:33 -0500 (0:00:00.033) 0:00:29.618 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 04 January 2025 11:33:33 -0500 (0:00:00.037) 0:00:29.655 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 04 January 2025 11:33:33 -0500 (0:00:00.031) 0:00:29.687 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 04 January 2025 11:33:33 -0500 (0:00:00.030) 0:00:29.718 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 04 January 2025 11:33:33 -0500 (0:00:00.041) 0:00:29.759 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: 
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 04 January 2025 11:33:33 -0500 (0:00:00.045) 0:00:29.804 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 04 January 2025 11:33:33 -0500 (0:00:00.038) 0:00:29.843 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 04 January 2025 11:33:33 -0500 (0:00:00.032) 0:00:29.875 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-demo-network.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 04 January 2025 11:33:33 -0500 (0:00:00.063) 0:00:29.939 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 04 January 2025 11:33:33 -0500 (0:00:00.035) 0:00:29.974 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 04 January 2025 11:33:33 -0500 (0:00:00.034) 0:00:30.009 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.network", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 04 January 2025 11:33:33 -0500 (0:00:00.074) 0:00:30.084 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 04 January 2025 11:33:33 -0500 (0:00:00.035) 0:00:30.119 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 04 January 2025 11:33:33 -0500 (0:00:00.031) 0:00:30.151 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 04 January 2025 11:33:34 -0500 (0:00:00.119) 0:00:30.270 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 04 January 2025 11:33:34 -0500 (0:00:00.073) 0:00:30.344 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 04 January 2025 11:33:34 -0500 (0:00:00.040) 0:00:30.384 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 04 January 2025 11:33:34 -0500 (0:00:00.033) 0:00:30.418 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 04 January 2025 11:33:34 -0500 (0:00:00.034) 0:00:30.453 ****** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 04 January 2025 11:33:34 -0500 (0:00:00.034) 0:00:30.487 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 04 January 2025 11:33:34 -0500 (0:00:00.032) 0:00:30.519 ****** ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: 
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 04 January 2025 11:33:34 -0500 (0:00:00.396) 0:00:30.915 ****** changed: [managed-node2] => { "changed": true, "checksum": "e57c08d49aff4bae8daab138d913aeddaa8682a0", "dest": "/etc/containers/systemd/quadlet-demo.network", "gid": 0, "group": "root", "md5sum": "061f3cf318cbd8ab5794bb1173831fb8", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 74, "src": "/root/.ansible/tmp/ansible-tmp-1736008414.7326126-18841-108046773840670/.source.network", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 04 January 2025 11:33:35 -0500 (0:00:00.821) 0:00:31.737 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 04 January 2025 11:33:35 -0500 (0:00:00.035) 0:00:31.772 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_copy_file is skipped", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 04 January 2025 11:33:35 -0500 (0:00:00.046) 0:00:31.818 ****** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 04 January 2025 11:33:36 -0500 (0:00:00.763) 0:00:32.581 ****** changed: [managed-node2] => { "changed": true, "name": "quadlet-demo-network.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "-.mount sysinit.target system.slice network-online.target basic.target systemd-journald.socket", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod 
cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-demo-network.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698245632", "EffectiveMemoryMax": "3698245632", "EffectiveTasksMax": "22349", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo-network.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo-network.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": 
"13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3093618688", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo-network.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "sysinit.target system.slice -.mount", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo.network", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", 
"StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo-network", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 04 January 2025 11:33:36 -0500 (0:00:00.614) 0:00:33.196 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 04 January 2025 11:33:36 -0500 (0:00:00.031) 0:00:33.228 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "quadlet-demo-mysql.volume", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Volume]", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 04 January 2025 11:33:37 -0500 (0:00:00.040) 0:00:33.268 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 04 January 2025 11:33:37 -0500 (0:00:00.037) 0:00:33.305 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 04 January 2025 11:33:37 -0500 (0:00:00.063) 0:00:33.369 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo-mysql", "__podman_quadlet_type": "volume", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: 
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 04 January 2025 11:33:37 -0500 (0:00:00.072) 0:00:33.442 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 04 January 2025 11:33:37 -0500 (0:00:00.100) 0:00:33.543 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 04 January 2025 11:33:37 -0500 (0:00:00.037) 0:00:33.580 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 04 January 2025 11:33:37 -0500 (0:00:00.033) 0:00:33.614 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 04 January 2025 11:33:37 -0500 (0:00:00.039) 0:00:33.654 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736008187.927243, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736008167.5443802, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3978348631", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 04 January 2025 11:33:37 -0500 (0:00:00.361) 0:00:34.016 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 04 January 2025 11:33:37 -0500 (0:00:00.028) 0:00:34.044 ****** skipping: [managed-node2] => { "changed": false, 
"false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 04 January 2025 11:33:37 -0500 (0:00:00.026) 0:00:34.071 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 04 January 2025 11:33:37 -0500 (0:00:00.029) 0:00:34.100 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 04 January 2025 11:33:37 -0500 (0:00:00.040) 0:00:34.141 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 04 January 2025 11:33:37 -0500 (0:00:00.043) 0:00:34.185 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 04 January 2025 11:33:38 -0500 (0:00:00.083) 0:00:34.268 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 04 January 2025 11:33:38 -0500 (0:00:00.100) 0:00:34.369 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 04 January 2025 11:33:38 -0500 (0:00:00.075) 0:00:34.445 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-demo-mysql-volume.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 04 
January 2025 11:33:38 -0500 (0:00:00.161) 0:00:34.606 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 04 January 2025 11:33:38 -0500 (0:00:00.057) 0:00:34.663 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 04 January 2025 11:33:38 -0500 (0:00:00.075) 0:00:34.739 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.volume", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 04 January 2025 11:33:38 -0500 (0:00:00.167) 0:00:34.906 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 04 January 2025 11:33:38 -0500 (0:00:00.118) 0:00:35.024 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 04 January 2025 11:33:38 -0500 (0:00:00.095) 0:00:35.119 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 04 January 2025 11:33:39 -0500 (0:00:00.199) 0:00:35.318 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 04 January 2025 11:33:39 -0500 (0:00:00.156) 0:00:35.475 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 04 January 2025 11:33:39 -0500 (0:00:00.052) 0:00:35.527 ****** skipping: [managed-node2] => { "changed": false, "false_condition": 
"__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 04 January 2025 11:33:39 -0500 (0:00:00.051) 0:00:35.579 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 04 January 2025 11:33:39 -0500 (0:00:00.033) 0:00:35.613 ****** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 04 January 2025 11:33:39 -0500 (0:00:00.027) 0:00:35.640 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 04 January 2025 11:33:39 -0500 (0:00:00.028) 0:00:35.669 ****** ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 34, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 04 January 2025 11:33:39 -0500 (0:00:00.387) 0:00:36.057 ****** changed: [managed-node2] => { "changed": true, "checksum": "585f8cbdf0ec73000f9227dcffbef71e9552ea4a", "dest": "/etc/containers/systemd/quadlet-demo-mysql.volume", "gid": 0, "group": "root", "md5sum": "5ddd03a022aeb4502d9bc8ce436b4233", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 9, "src": "/root/.ansible/tmp/ansible-tmp-1736008419.8707628-19085-91622550066190/.source.volume", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 04 January 2025 11:33:40 -0500 (0:00:00.702) 0:00:36.760 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 04 January 2025 11:33:40 -0500 (0:00:00.053) 0:00:36.813 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_copy_file is skipped", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: 
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 04 January 2025 11:33:40 -0500 (0:00:00.050) 0:00:36.864 ****** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 04 January 2025 11:33:41 -0500 (0:00:00.844) 0:00:37.709 ****** changed: [managed-node2] => { "changed": true, "name": "quadlet-demo-mysql-volume.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "system.slice -.mount network-online.target basic.target systemd-journald.socket sysinit.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-demo-mysql-volume.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698245632", "EffectiveMemoryMax": "3698245632", "EffectiveTasksMax": "22349", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": 
"root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo-mysql-volume.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo-mysql-volume.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3085766656", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo-mysql-volume.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", 
"PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "sysinit.target system.slice -.mount", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.volume", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo-mysql-volume", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 04 January 2025 11:33:42 -0500 (0:00:00.639) 0:00:38.348 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: 
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 04 January 2025 11:33:42 -0500 (0:00:00.036) 0:00:38.385 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Container]\nImage=quay.io/linux-system-roles/mysql:5.6\nContainerName=quadlet-demo-mysql\nVolume=quadlet-demo-mysql.volume:/var/lib/mysql\nVolume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z\nNetwork=quadlet-demo.network\nSecret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD\nHealthCmd=/bin/true\nHealthOnFailure=kill\n", "__podman_quadlet_template_src": "quadlet-demo-mysql.container.j2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 04 January 2025 11:33:42 -0500 (0:00:00.098) 0:00:38.484 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 04 January 2025 11:33:42 -0500 (0:00:00.056) 0:00:38.540 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_str", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 04 January 2025 11:33:42 -0500 (0:00:00.038) 0:00:38.578 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo-mysql", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 04 January 2025 11:33:42 -0500 (0:00:00.050) 0:00:38.628 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 04 January 2025 11:33:42 -0500 (0:00:00.066) 0:00:38.695 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 04 January 2025 11:33:42 -0500 (0:00:00.032) 0:00:38.727 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : 
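
Unescaped for readability, the __podman_quadlet_str rendered from quadlet-demo-mysql.container.j2 above corresponds to the following unit (the inline comments are explanatory additions, not part of the deployed file):

[Install]
WantedBy=default.target

[Container]
Image=quay.io/linux-system-roles/mysql:5.6
ContainerName=quadlet-demo-mysql
# Named volume from quadlet-demo-mysql.volume plus a relabeled host bind mount
Volume=quadlet-demo-mysql.volume:/var/lib/mysql
Volume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z
Network=quadlet-demo.network
# Podman secret injected into the container as the MYSQL_ROOT_PASSWORD environment variable
Secret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD
HealthCmd=/bin/true
HealthOnFailure=kill
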
Set group for podman user] ************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 04 January 2025 11:33:42 -0500 (0:00:00.035) 0:00:38.763 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 04 January 2025 11:33:42 -0500 (0:00:00.191) 0:00:38.954 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736008187.927243, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736008167.5443802, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3978348631", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 04 January 2025 11:33:43 -0500 (0:00:00.386) 0:00:39.340 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 04 January 2025 11:33:43 -0500 (0:00:00.038) 0:00:39.378 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 04 January 2025 11:33:43 -0500 (0:00:00.073) 0:00:39.452 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 04 January 2025 11:33:43 -0500 (0:00:00.095) 0:00:39.547 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 04 January 2025 11:33:43 -0500 (0:00:00.067) 0:00:39.614 ****** skipping: [managed-node2] => { "changed": false, 
"false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 04 January 2025 11:33:43 -0500 (0:00:00.084) 0:00:39.699 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 04 January 2025 11:33:43 -0500 (0:00:00.081) 0:00:39.780 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 04 January 2025 11:33:43 -0500 (0:00:00.059) 0:00:39.839 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 04 January 2025 11:33:43 -0500 (0:00:00.086) 0:00:39.926 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-demo-mysql.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 04 January 2025 11:33:43 -0500 (0:00:00.089) 0:00:40.015 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 04 January 2025 11:33:43 -0500 (0:00:00.050) 0:00:40.065 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 04 January 2025 11:33:43 -0500 (0:00:00.047) 0:00:40.113 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.container", "__podman_volumes": [ "/tmp/quadlet_demo" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: 
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 04 January 2025 11:33:44 -0500 (0:00:00.179) 0:00:40.292 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 04 January 2025 11:33:44 -0500 (0:00:00.073) 0:00:40.366 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 04 January 2025 11:33:44 -0500 (0:00:00.060) 0:00:40.426 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 04 January 2025 11:33:44 -0500 (0:00:00.130) 0:00:40.556 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 04 January 2025 11:33:44 -0500 (0:00:00.093) 0:00:40.650 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 04 January 2025 11:33:44 -0500 (0:00:00.045) 0:00:40.696 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 04 January 2025 11:33:44 -0500 (0:00:00.089) 0:00:40.785 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 04 January 2025 11:33:44 -0500 (0:00:00.045) 0:00:40.831 ****** changed: [managed-node2] => (item=/tmp/quadlet_demo) => { "ansible_loop_var": "item", "changed": true, "gid": 0, "group": "root", "item": "/tmp/quadlet_demo", "mode": "0777", "owner": "root", "path": "/tmp/quadlet_demo", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task 
path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 04 January 2025 11:33:45 -0500 (0:00:00.416) 0:00:41.247 ****** changed: [managed-node2] => (item=None) => { "attempts": 1, "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 04 January 2025 11:33:51 -0500 (0:00:06.586) 0:00:47.833 ****** ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 67, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 04 January 2025 11:33:51 -0500 (0:00:00.368) 0:00:48.202 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 04 January 2025 11:33:51 -0500 (0:00:00.029) 0:00:48.231 ****** changed: [managed-node2] => { "changed": true, "checksum": "ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4", "dest": "/etc/containers/systemd/quadlet-demo-mysql.container", "gid": 0, "group": "root", "md5sum": "341b473056d2a5dfa35970b0d2e23a5d", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 363, "src": "/root/.ansible/tmp/ansible-tmp-1736008432.0441234-19575-189994622198139/.source.container", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 04 January 2025 11:33:52 -0500 (0:00:00.692) 0:00:48.924 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_copy_content is skipped", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 04 January 2025 11:33:52 -0500 (0:00:00.047) 0:00:48.971 ****** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 04 January 2025 11:33:53 -0500 (0:00:00.785) 0:00:49.756 ****** changed: [managed-node2] => { "changed": true, "name": "quadlet-demo-mysql.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", 
"ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "basic.target systemd-journald.socket system.slice -.mount network-online.target sysinit.target quadlet-demo-mysql-volume.service tmp.mount quadlet-demo-network.service", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "multi-user.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuset io memory pids", "Description": "quadlet-demo-mysql.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698245632", "EffectiveMemoryMax": "3698245632", "EffectiveTasksMax": "22349", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i 
--cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo-mysql.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo-mysql.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "2944622592", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": 
"infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo-mysql.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "continue", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "quadlet-demo-mysql-volume.service quadlet-demo-network.service -.mount sysinit.target system.slice", "RequiresMountsFor": "/tmp/quadlet_demo /run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.container", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo-mysql", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 
30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 04 January 2025 11:33:54 -0500 (0:00:00.833) 0:00:50.590 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 04 January 2025 11:33:54 -0500 (0:00:00.033) 0:00:50.623 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "envoy-proxy-configmap.yml", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "---\napiVersion: v1\nkind: ConfigMap\nmetadata:\n name: envoy-proxy-config\ndata:\n envoy.yaml: |\n admin:\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 9901\n\n static_resources:\n listeners:\n - name: listener_0\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 8080\n filter_chains:\n - filters:\n - name: envoy.filters.network.http_connection_manager\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager\n stat_prefix: ingress_http\n codec_type: AUTO\n route_config:\n name: local_route\n virtual_hosts:\n - name: local_service\n domains: [\"*\"]\n routes:\n - match:\n prefix: \"/\"\n route:\n cluster: backend\n http_filters:\n - name: envoy.filters.http.router\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.filters.http.router.v3.Router\n transport_socket:\n name: envoy.transport_sockets.tls\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.DownstreamTlsContext\n common_tls_context:\n tls_certificates:\n - certificate_chain:\n filename: /etc/envoy-certificates/certificate.pem\n private_key:\n filename: /etc/envoy-certificates/certificate.key\n clusters:\n - name: backend\n connect_timeout: 5s\n type: STATIC\n dns_refresh_rate: 1800s\n lb_policy: ROUND_ROBIN\n load_assignment:\n cluster_name: backend\n endpoints:\n - lb_endpoints:\n - endpoint:\n address:\n socket_address:\n address: 127.0.0.1\n port_value: 80", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 04 January 2025 11:33:54 -0500 (0:00:00.040) 0:00:50.664 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 04 January 2025 11:33:54 -0500 (0:00:00.038) 0:00:50.702 
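The quadlet-demo-mysql.service shown being started above is not written by hand: Podman's Quadlet generator produces it from /etc/containers/systemd/quadlet-demo-mysql.container (see SourcePath= and FragmentPath= in the dump). The .container file itself is never printed in this log (only its checksum and size appear), but its likely shape can be read back from the generated ExecStart=, Requires= and WantedBy= values. The following is a hedged reconstruction, not the verbatim file that was deployed; key names follow podman-systemd.unit(5):

    # Reconstructed sketch of quadlet-demo-mysql.container, inferred from the
    # generated service properties above. The .volume/.network references are
    # inferred from Requires=quadlet-demo-mysql-volume.service and
    # quadlet-demo-network.service; the deployed file may differ in detail.
    [Install]
    WantedBy=multi-user.target

    [Container]
    Image=quay.io/linux-system-roles/mysql:5.6
    ContainerName=quadlet-demo-mysql
    Volume=quadlet-demo-mysql.volume:/var/lib/mysql
    Volume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z
    Network=quadlet-demo.network
    Secret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD
    HealthCmd=/bin/true
    HealthOnFailure=kill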
****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 04 January 2025 11:33:54 -0500 (0:00:00.031) 0:00:50.733 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "envoy-proxy-configmap", "__podman_quadlet_type": "yml", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 04 January 2025 11:33:54 -0500 (0:00:00.045) 0:00:50.778 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 04 January 2025 11:33:54 -0500 (0:00:00.065) 0:00:50.844 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 04 January 2025 11:33:54 -0500 (0:00:00.053) 0:00:50.897 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 04 January 2025 11:33:54 -0500 (0:00:00.051) 0:00:50.949 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 04 January 2025 11:33:54 -0500 (0:00:00.061) 0:00:51.010 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736008187.927243, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736008167.5443802, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3978348631", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] 
*** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 04 January 2025 11:33:55 -0500 (0:00:00.492) 0:00:51.503 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 04 January 2025 11:33:55 -0500 (0:00:00.082) 0:00:51.585 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 04 January 2025 11:33:55 -0500 (0:00:00.034) 0:00:51.619 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 04 January 2025 11:33:55 -0500 (0:00:00.033) 0:00:51.653 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 04 January 2025 11:33:55 -0500 (0:00:00.028) 0:00:51.682 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 04 January 2025 11:33:55 -0500 (0:00:00.028) 0:00:51.710 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 04 January 2025 11:33:55 -0500 (0:00:00.027) 0:00:51.738 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 04 January 2025 11:33:55 -0500 (0:00:00.027) 0:00:51.766 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: 
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 04 January 2025 11:33:55 -0500 (0:00:00.027) 0:00:51.793 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 04 January 2025 11:33:55 -0500 (0:00:00.050) 0:00:51.844 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 04 January 2025 11:33:55 -0500 (0:00:00.038) 0:00:51.882 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 04 January 2025 11:33:55 -0500 (0:00:00.048) 0:00:51.930 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/envoy-proxy-configmap.yml", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 04 January 2025 11:33:55 -0500 (0:00:00.089) 0:00:52.019 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 04 January 2025 11:33:55 -0500 (0:00:00.047) 0:00:52.067 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 04 January 2025 11:33:55 -0500 (0:00:00.031) 0:00:52.098 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 04 January 2025 11:33:55 -0500 (0:00:00.070) 0:00:52.168 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] 
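The "Manage linger" tasks in this excerpt are all skipped because __podman_rootless is false (every quadlet here runs as root). For a rootless user the role has to enable lingering so that the user's systemd instance, and therefore the generated quadlet services, keeps running after logout. A hedged sketch of what that step amounts to, using loginctl; illustrative only, not the role's verbatim task:

    # Hedged sketch: enable lingering for a rootless Podman user. Skipped in
    # this run, as the "false_condition": "__podman_rootless | bool" results show.
    - name: Enable linger if needed
      ansible.builtin.command: loginctl enable-linger {{ __podman_user }}
      args:
        creates: /var/lib/systemd/linger/{{ __podman_user }}
      when: __podman_rootless | bool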
************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 04 January 2025 11:33:55 -0500 (0:00:00.049) 0:00:52.217 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 04 January 2025 11:33:56 -0500 (0:00:00.029) 0:00:52.246 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 04 January 2025 11:33:56 -0500 (0:00:00.027) 0:00:52.274 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 04 January 2025 11:33:56 -0500 (0:00:00.027) 0:00:52.302 ****** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 04 January 2025 11:33:56 -0500 (0:00:00.025) 0:00:52.328 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 04 January 2025 11:33:56 -0500 (0:00:00.065) 0:00:52.394 ****** ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 103, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 04 January 2025 11:33:56 -0500 (0:00:00.386) 0:00:52.781 ****** changed: [managed-node2] => { "changed": true, "checksum": "d681c7d56f912150d041873e880818b22a90c188", "dest": "/etc/containers/systemd/envoy-proxy-configmap.yml", "gid": 0, "group": "root", "md5sum": "aec75d972c231aac004e1338934544cf", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 2102, "src": "/root/.ansible/tmp/ansible-tmp-1736008436.5920036-19781-220862317646466/.source.yml", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 04 January 2025 11:33:57 -0500 (0:00:00.734) 0:00:53.516 ****** 
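envoy-proxy-configmap.yml has just been copied into /etc/containers/systemd, but a plain .yml quadlet is only a payload: here it is the Kubernetes ConfigMap that the quadlet-demo.kube unit later references via ConfigMap=. Quadlet generates no systemd unit for it, so __podman_service_name stays empty and the Start service / Restart service tasks that follow are skipped. The guard amounts to something like this hedged sketch (the when: expression matches the false_condition values in the log; the task body is illustrative, not the role's verbatim code):

    # Hedged sketch: only start a generated unit for quadlet types that yield
    # one (.container, .kube, ...); .yml payloads leave __podman_service_name empty.
    - name: Start service
      ansible.builtin.systemd:
        name: "{{ __podman_service_name }}"
        state: started
      when: __podman_service_name | length > 0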
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 04 January 2025 11:33:57 -0500 (0:00:00.030) 0:00:53.547 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_copy_file is skipped", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 04 January 2025 11:33:57 -0500 (0:00:00.029) 0:00:53.576 ****** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 04 January 2025 11:33:58 -0500 (0:00:00.784) 0:00:54.361 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_service_name | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 04 January 2025 11:33:58 -0500 (0:00:00.058) 0:00:54.420 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_service_name | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 04 January 2025 11:33:58 -0500 (0:00:00.045) 0:00:54.465 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "---\napiVersion: v1\nkind: PersistentVolumeClaim\nmetadata:\n name: wp-pv-claim\n labels:\n app: wordpress\nspec:\n accessModes:\n - ReadWriteOnce\n resources:\n requests:\n storage: 20Gi\n---\napiVersion: v1\nkind: Pod\nmetadata:\n name: quadlet-demo\nspec:\n containers:\n - name: wordpress\n image: quay.io/linux-system-roles/wordpress:4.8-apache\n env:\n - name: WORDPRESS_DB_HOST\n value: quadlet-demo-mysql\n - name: WORDPRESS_DB_PASSWORD\n valueFrom:\n secretKeyRef:\n name: mysql-root-password-kube\n key: password\n volumeMounts:\n - name: wordpress-persistent-storage\n mountPath: /var/www/html\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n - name: envoy\n image: quay.io/linux-system-roles/envoyproxy:v1.25.0\n volumeMounts:\n - name: config-volume\n mountPath: /etc/envoy\n - name: certificates\n mountPath: /etc/envoy-certificates\n env:\n - name: ENVOY_UID\n value: \"0\"\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n volumes:\n - name: config-volume\n configMap:\n name: envoy-proxy-config\n - name: certificates\n secret:\n secretName: envoy-certificates\n - name: wordpress-persistent-storage\n persistentVolumeClaim:\n claimName: wp-pv-claim\n - name: www # not used - for testing 
hostpath\n hostPath:\n path: /tmp/httpd3\n - name: create # not used - for testing hostpath\n hostPath:\n path: /tmp/httpd3-create\n", "__podman_quadlet_template_src": "quadlet-demo.yml.j2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 04 January 2025 11:33:58 -0500 (0:00:00.127) 0:00:54.592 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 04 January 2025 11:33:58 -0500 (0:00:00.045) 0:00:54.638 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_str", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 04 January 2025 11:33:58 -0500 (0:00:00.033) 0:00:54.672 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo", "__podman_quadlet_type": "yml", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 04 January 2025 11:33:58 -0500 (0:00:00.043) 0:00:54.716 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 04 January 2025 11:33:58 -0500 (0:00:00.055) 0:00:54.771 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 04 January 2025 11:33:58 -0500 (0:00:00.032) 0:00:54.804 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 04 January 2025 11:33:58 -0500 (0:00:00.031) 0:00:54.836 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 04 January 2025 11:33:58 -0500 (0:00:00.040) 
0:00:54.877 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736008187.927243, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736008167.5443802, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3978348631", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 04 January 2025 11:33:59 -0500 (0:00:00.368) 0:00:55.245 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 04 January 2025 11:33:59 -0500 (0:00:00.029) 0:00:55.275 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 04 January 2025 11:33:59 -0500 (0:00:00.027) 0:00:55.302 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 04 January 2025 11:33:59 -0500 (0:00:00.029) 0:00:55.332 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 04 January 2025 11:33:59 -0500 (0:00:00.029) 0:00:55.361 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 04 January 2025 11:33:59 -0500 (0:00:00.089) 0:00:55.451 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : 
Fail if user not in subuid file] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 04 January 2025 11:33:59 -0500 (0:00:00.041) 0:00:55.492 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 04 January 2025 11:33:59 -0500 (0:00:00.032) 0:00:55.525 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 04 January 2025 11:33:59 -0500 (0:00:00.040) 0:00:55.566 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 04 January 2025 11:33:59 -0500 (0:00:00.072) 0:00:55.639 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 04 January 2025 11:33:59 -0500 (0:00:00.035) 0:00:55.674 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 04 January 2025 11:33:59 -0500 (0:00:00.033) 0:00:55.707 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.yml", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 04 January 2025 11:33:59 -0500 (0:00:00.071) 0:00:55.778 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 04 January 2025 11:33:59 -0500 (0:00:00.036) 0:00:55.814 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was 
False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 04 January 2025 11:33:59 -0500 (0:00:00.026) 0:00:55.841 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 04 January 2025 11:33:59 -0500 (0:00:00.061) 0:00:55.903 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 04 January 2025 11:33:59 -0500 (0:00:00.069) 0:00:55.973 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 04 January 2025 11:33:59 -0500 (0:00:00.046) 0:00:56.020 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 04 January 2025 11:33:59 -0500 (0:00:00.033) 0:00:56.053 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 04 January 2025 11:33:59 -0500 (0:00:00.033) 0:00:56.087 ****** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 04 January 2025 11:33:59 -0500 (0:00:00.033) 0:00:56.120 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 04 January 2025 11:33:59 -0500 (0:00:00.031) 0:00:56.152 ****** ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 136, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: 
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 04 January 2025 11:34:00 -0500 (0:00:00.446) 0:00:56.599 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 04 January 2025 11:34:00 -0500 (0:00:00.056) 0:00:56.655 ****** changed: [managed-node2] => { "changed": true, "checksum": "998dccde0483b1654327a46ddd89cbaa47650370", "dest": "/etc/containers/systemd/quadlet-demo.yml", "gid": 0, "group": "root", "md5sum": "fd890594adfc24339cb9cdc5e7b19a66", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 1605, "src": "/root/.ansible/tmp/ansible-tmp-1736008440.4832172-19964-76184447797368/.source.yml", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 04 January 2025 11:34:01 -0500 (0:00:00.986) 0:00:57.642 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_copy_content is skipped", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 04 January 2025 11:34:01 -0500 (0:00:00.087) 0:00:57.730 ****** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 04 January 2025 11:34:02 -0500 (0:00:00.865) 0:00:58.595 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_service_name | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 04 January 2025 11:34:02 -0500 (0:00:00.035) 0:00:58.630 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_service_name | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 04 January 2025 11:34:02 -0500 (0:00:00.042) 0:00:58.673 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "quadlet-demo.kube", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Unit]\nRequires=quadlet-demo-mysql.service\nAfter=quadlet-demo-mysql.service\n\n[Kube]\n# Point to the yaml file in the same directory\nYaml=quadlet-demo.yml\n# Use the quadlet-demo network\nNetwork=quadlet-demo.network\n# Publish the envoy proxy data port\nPublishPort=8000:8080\n# Publish the envoy proxy admin 
port\nPublishPort=9000:9901\n# Use the envoy proxy config map in the same directory\nConfigMap=envoy-proxy-configmap.yml", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 04 January 2025 11:34:02 -0500 (0:00:00.074) 0:00:58.748 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 04 January 2025 11:34:02 -0500 (0:00:00.062) 0:00:58.810 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 04 January 2025 11:34:02 -0500 (0:00:00.053) 0:00:58.864 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo", "__podman_quadlet_type": "kube", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 04 January 2025 11:34:02 -0500 (0:00:00.077) 0:00:58.941 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 04 January 2025 11:34:02 -0500 (0:00:00.097) 0:00:59.039 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 04 January 2025 11:34:02 -0500 (0:00:00.058) 0:00:59.097 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 04 January 2025 11:34:02 -0500 (0:00:00.062) 0:00:59.160 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 04 January 2025 11:34:02 -0500 (0:00:00.056) 0:00:59.216 ****** ok: 
[managed-node2] => { "changed": false, "stat": { "atime": 1736008187.927243, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736008167.5443802, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3978348631", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 04 January 2025 11:34:03 -0500 (0:00:00.397) 0:00:59.614 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 04 January 2025 11:34:03 -0500 (0:00:00.039) 0:00:59.654 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 04 January 2025 11:34:03 -0500 (0:00:00.042) 0:00:59.696 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 04 January 2025 11:34:03 -0500 (0:00:00.049) 0:00:59.745 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 04 January 2025 11:34:03 -0500 (0:00:00.074) 0:00:59.819 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 04 January 2025 11:34:03 -0500 (0:00:00.049) 0:00:59.868 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in 
subuid file] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 04 January 2025 11:34:03 -0500 (0:00:00.059) 0:00:59.928 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 04 January 2025 11:34:03 -0500 (0:00:00.045) 0:00:59.974 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 04 January 2025 11:34:03 -0500 (0:00:00.077) 0:01:00.051 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": [ "quadlet-demo.yml" ], "__podman_service_name": "quadlet-demo.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 04 January 2025 11:34:03 -0500 (0:00:00.059) 0:01:00.110 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 04 January 2025 11:34:03 -0500 (0:00:00.034) 0:01:00.145 ****** ok: [managed-node2] => { "changed": false, "content": 
"LS0tCmFwaVZlcnNpb246IHYxCmtpbmQ6IFBlcnNpc3RlbnRWb2x1bWVDbGFpbQptZXRhZGF0YToKICBuYW1lOiB3cC1wdi1jbGFpbQogIGxhYmVsczoKICAgIGFwcDogd29yZHByZXNzCnNwZWM6CiAgYWNjZXNzTW9kZXM6CiAgLSBSZWFkV3JpdGVPbmNlCiAgcmVzb3VyY2VzOgogICAgcmVxdWVzdHM6CiAgICAgIHN0b3JhZ2U6IDIwR2kKLS0tCmFwaVZlcnNpb246IHYxCmtpbmQ6IFBvZAptZXRhZGF0YToKICBuYW1lOiBxdWFkbGV0LWRlbW8Kc3BlYzoKICBjb250YWluZXJzOgogIC0gbmFtZTogd29yZHByZXNzCiAgICBpbWFnZTogcXVheS5pby9saW51eC1zeXN0ZW0tcm9sZXMvd29yZHByZXNzOjQuOC1hcGFjaGUKICAgIGVudjoKICAgIC0gbmFtZTogV09SRFBSRVNTX0RCX0hPU1QKICAgICAgdmFsdWU6IHF1YWRsZXQtZGVtby1teXNxbAogICAgLSBuYW1lOiBXT1JEUFJFU1NfREJfUEFTU1dPUkQKICAgICAgdmFsdWVGcm9tOgogICAgICAgIHNlY3JldEtleVJlZjoKICAgICAgICAgIG5hbWU6IG15c3FsLXJvb3QtcGFzc3dvcmQta3ViZQogICAgICAgICAga2V5OiBwYXNzd29yZAogICAgdm9sdW1lTW91bnRzOgogICAgLSBuYW1lOiB3b3JkcHJlc3MtcGVyc2lzdGVudC1zdG9yYWdlCiAgICAgIG1vdW50UGF0aDogL3Zhci93d3cvaHRtbAogICAgcmVzb3VyY2VzOgogICAgICByZXF1ZXN0czoKICAgICAgICBtZW1vcnk6ICI2NE1pIgogICAgICAgIGNwdTogIjI1MG0iCiAgICAgIGxpbWl0czoKICAgICAgICBtZW1vcnk6ICIxMjhNaSIKICAgICAgICBjcHU6ICI1MDBtIgogIC0gbmFtZTogZW52b3kKICAgIGltYWdlOiBxdWF5LmlvL2xpbnV4LXN5c3RlbS1yb2xlcy9lbnZveXByb3h5OnYxLjI1LjAKICAgIHZvbHVtZU1vdW50czoKICAgIC0gbmFtZTogY29uZmlnLXZvbHVtZQogICAgICBtb3VudFBhdGg6IC9ldGMvZW52b3kKICAgIC0gbmFtZTogY2VydGlmaWNhdGVzCiAgICAgIG1vdW50UGF0aDogL2V0Yy9lbnZveS1jZXJ0aWZpY2F0ZXMKICAgIGVudjoKICAgIC0gbmFtZTogRU5WT1lfVUlECiAgICAgIHZhbHVlOiAiMCIKICAgIHJlc291cmNlczoKICAgICAgcmVxdWVzdHM6CiAgICAgICAgbWVtb3J5OiAiNjRNaSIKICAgICAgICBjcHU6ICIyNTBtIgogICAgICBsaW1pdHM6CiAgICAgICAgbWVtb3J5OiAiMTI4TWkiCiAgICAgICAgY3B1OiAiNTAwbSIKICB2b2x1bWVzOgogIC0gbmFtZTogY29uZmlnLXZvbHVtZQogICAgY29uZmlnTWFwOgogICAgICBuYW1lOiBlbnZveS1wcm94eS1jb25maWcKICAtIG5hbWU6IGNlcnRpZmljYXRlcwogICAgc2VjcmV0OgogICAgICBzZWNyZXROYW1lOiBlbnZveS1jZXJ0aWZpY2F0ZXMKICAtIG5hbWU6IHdvcmRwcmVzcy1wZXJzaXN0ZW50LXN0b3JhZ2UKICAgIHBlcnNpc3RlbnRWb2x1bWVDbGFpbToKICAgICAgY2xhaW1OYW1lOiB3cC1wdi1jbGFpbQogIC0gbmFtZTogd3d3ICAjIG5vdCB1c2VkIC0gZm9yIHRlc3RpbmcgaG9zdHBhdGgKICAgIGhvc3RQYXRoOgogICAgICBwYXRoOiAvdG1wL2h0dHBkMwogIC0gbmFtZTogY3JlYXRlICAjIG5vdCB1c2VkIC0gZm9yIHRlc3RpbmcgaG9zdHBhdGgKICAgIGhvc3RQYXRoOgogICAgICBwYXRoOiAvdG1wL2h0dHBkMy1jcmVhdGUK", "encoding": "base64", "source": "/etc/containers/systemd/quadlet-demo.yml" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 04 January 2025 11:34:04 -0500 (0:00:00.461) 0:01:00.606 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [ "quay.io/linux-system-roles/wordpress:4.8-apache", "quay.io/linux-system-roles/envoyproxy:v1.25.0" ], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.kube", "__podman_volumes": [ "/tmp/httpd3", "/tmp/httpd3-create" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 04 January 2025 11:34:04 -0500 (0:00:00.101) 0:01:00.707 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 04 January 2025 11:34:04 -0500 (0:00:00.056) 0:01:00.764 ****** skipping: [managed-node2] 
=> { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 04 January 2025 11:34:04 -0500 (0:00:00.039) 0:01:00.803 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 04 January 2025 11:34:04 -0500 (0:00:00.101) 0:01:00.905 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 04 January 2025 11:34:04 -0500 (0:00:00.065) 0:01:00.971 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 04 January 2025 11:34:04 -0500 (0:00:00.032) 0:01:01.004 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 04 January 2025 11:34:04 -0500 (0:00:00.030) 0:01:01.034 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 04 January 2025 11:34:04 -0500 (0:00:00.029) 0:01:01.063 ****** changed: [managed-node2] => (item=/tmp/httpd3) => { "ansible_loop_var": "item", "changed": true, "gid": 0, "group": "root", "item": "/tmp/httpd3", "mode": "0755", "owner": "root", "path": "/tmp/httpd3", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 6, "state": "directory", "uid": 0 } changed: [managed-node2] => (item=/tmp/httpd3-create) => { "ansible_loop_var": "item", "changed": true, "gid": 0, "group": "root", "item": "/tmp/httpd3-create", "mode": "0755", "owner": "root", "path": "/tmp/httpd3-create", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 04 January 2025 11:34:05 -0500 (0:00:00.723) 0:01:01.787 ****** changed: [managed-node2] => (item=None) => { "attempts": 1, "censored": "the output has been hidden due to the fact that 'no_log: true' was 
specified for this result", "changed": true } changed: [managed-node2] => (item=None) => { "attempts": 1, "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 04 January 2025 11:34:22 -0500 (0:00:17.036) 0:01:18.823 ****** ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 160, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 04 January 2025 11:34:22 -0500 (0:00:00.368) 0:01:19.192 ****** changed: [managed-node2] => { "changed": true, "checksum": "7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7", "dest": "/etc/containers/systemd/quadlet-demo.kube", "gid": 0, "group": "root", "md5sum": "da53c88f92b68b0487aa209f795b6bb3", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 456, "src": "/root/.ansible/tmp/ansible-tmp-1736008463.0030222-20502-2952385679144/.source.kube", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 04 January 2025 11:34:23 -0500 (0:00:00.662) 0:01:19.855 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 04 January 2025 11:34:23 -0500 (0:00:00.033) 0:01:19.888 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_copy_file is skipped", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 04 January 2025 11:34:23 -0500 (0:00:00.030) 0:01:19.918 ****** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 04 January 2025 11:34:24 -0500 (0:00:00.755) 0:01:20.674 ****** changed: [managed-node2] => { "changed": true, "name": "quadlet-demo.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "basic.target network-online.target -.mount sysinit.target systemd-journald.socket system.slice 
quadlet-demo-network.service quadlet-demo-mysql.service", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target multi-user.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-demo.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698245632", "EffectiveMemoryMax": "3698245632", "EffectiveTasksMax": "22349", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", 
"FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "2394804224", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": 
"no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "quadlet-demo-mysql.service system.slice sysinit.target -.mount quadlet-demo-network.service", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo.kube", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 04 January 2025 11:34:25 -0500 (0:00:01.140) 0:01:21.814 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Saturday 04 January 2025 11:34:25 -0500 (0:00:00.073) 0:01:21.888 ****** skipping: [managed-node2] => { "changed": 
false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Saturday 04 January 2025 11:34:25 -0500 (0:00:00.026) 0:01:21.915 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Saturday 04 January 2025 11:34:25 -0500 (0:00:00.026) 0:01:21.942 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Check quadlet files] ***************************************************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:96 Saturday 04 January 2025 11:34:25 -0500 (0:00:00.041) 0:01:21.984 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "ls", "-alrtF", "/etc/containers/systemd" ], "delta": "0:00:00.005518", "end": "2025-01-04 11:34:26.098367", "rc": 0, "start": "2025-01-04 11:34:26.092849" } STDOUT: total 24 drwxr-xr-x. 9 root root 178 Jan 4 11:30 ../ -rw-r--r--. 1 root root 74 Jan 4 11:33 quadlet-demo.network -rw-r--r--. 1 root root 9 Jan 4 11:33 quadlet-demo-mysql.volume -rw-r--r--. 1 root root 363 Jan 4 11:33 quadlet-demo-mysql.container -rw-r--r--. 1 root root 2102 Jan 4 11:33 envoy-proxy-configmap.yml -rw-r--r--. 1 root root 1605 Jan 4 11:34 quadlet-demo.yml -rw-r--r--. 1 root root 456 Jan 4 11:34 quadlet-demo.kube drwxr-xr-x. 
2 root root 185 Jan 4 11:34 ./ TASK [Check containers] ******************************************************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:100 Saturday 04 January 2025 11:34:26 -0500 (0:00:00.423) 0:01:22.407 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "-a" ], "delta": "0:00:00.043083", "end": "2025-01-04 11:34:26.580992", "failed_when_result": false, "rc": 0, "start": "2025-01-04 11:34:26.537909" } STDOUT: CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES 23724cd2b98c localhost/podman-pause:5.3.1-1733097600 About a minute ago Up About a minute c8c001b59877-service dbc2b9607a4c localhost/podman-pause:5.3.1-1733097600 About a minute ago Up About a minute 0.0.0.0:15002->80/tcp ecaac7b287a8-infra f27f043c4df7 quay.io/libpod/testimage:20210610 About a minute ago Up About a minute 0.0.0.0:15002->80/tcp httpd2-httpd2 ea65b4c142a1 localhost/podman-pause:5.3.1-1733097600 About a minute ago Up About a minute 8c5a350fe43d-service 196161f20f83 localhost/podman-pause:5.3.1-1733097600 About a minute ago Up About a minute 0.0.0.0:15003->80/tcp a93b7ede24ca-infra cba46159e385 quay.io/libpod/testimage:20210610 About a minute ago Up About a minute 0.0.0.0:15003->80/tcp httpd3-httpd3 1ce48534b65f quay.io/linux-system-roles/mysql:5.6 mysqld 32 seconds ago Up 32 seconds (healthy) 3306/tcp quadlet-demo-mysql f07164ee3e90 localhost/podman-pause:5.3.1-1733097600 1 second ago Up 1 second a96f3a51b8d1-service 65b9ce253bc1 localhost/podman-pause:5.3.1-1733097600 1 second ago Up 1 second 0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp 56bbf7fc2aa4-infra 99436d65d89a quay.io/linux-system-roles/wordpress:4.8-apache apache2-foregroun... 1 second ago Up 1 second 0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp, 80/tcp quadlet-demo-wordpress 23c3f79e2bc8 quay.io/linux-system-roles/envoyproxy:v1.25.0 envoy -c /etc/env... 
1 second ago Up 1 second 0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp, 10000/tcp quadlet-demo-envoy TASK [Check volumes] *********************************************************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:105 Saturday 04 January 2025 11:34:26 -0500 (0:00:00.481) 0:01:22.889 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls" ], "delta": "0:00:00.028733", "end": "2025-01-04 11:34:27.018669", "failed_when_result": false, "rc": 0, "start": "2025-01-04 11:34:26.989936" } STDOUT: DRIVER VOLUME NAME local systemd-quadlet-demo-mysql local wp-pv-claim local envoy-proxy-config local envoy-certificates TASK [Check pods] ************************************************************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:110 Saturday 04 January 2025 11:34:27 -0500 (0:00:00.426) 0:01:23.315 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "pod", "ps", "--ctr-ids", "--ctr-names", "--ctr-status" ], "delta": "0:00:00.038171", "end": "2025-01-04 11:34:27.407452", "failed_when_result": false, "rc": 0, "start": "2025-01-04 11:34:27.369281" } STDOUT: POD ID NAME STATUS CREATED INFRA ID IDS NAMES STATUS 56bbf7fc2aa4 quadlet-demo Running 2 seconds ago 65b9ce253bc1 65b9ce253bc1,99436d65d89a,23c3f79e2bc8 56bbf7fc2aa4-infra,quadlet-demo-wordpress,quadlet-demo-envoy running,running,running a93b7ede24ca httpd3 Running About a minute ago 196161f20f83 196161f20f83,cba46159e385 a93b7ede24ca-infra,httpd3-httpd3 running,running ecaac7b287a8 httpd2 Running About a minute ago dbc2b9607a4c dbc2b9607a4c,f27f043c4df7 ecaac7b287a8-infra,httpd2-httpd2 running,running TASK [Check systemd] *********************************************************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:115 Saturday 04 January 2025 11:34:27 -0500 (0:00:00.412) 0:01:23.728 ****** ok: [managed-node2] => { "changed": false, "cmd": "set -euo pipefail; systemctl list-units | grep quadlet", "delta": "0:00:00.014707", "end": "2025-01-04 11:34:27.878670", "failed_when_result": false, "rc": 0, "start": "2025-01-04 11:34:27.863963" } STDOUT: quadlet-demo-mysql-volume.service loaded active exited quadlet-demo-mysql-volume.service quadlet-demo-mysql.service loaded active running quadlet-demo-mysql.service quadlet-demo-network.service loaded active exited quadlet-demo-network.service quadlet-demo.service loaded active running quadlet-demo.service TASK [Check web] *************************************************************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:121 Saturday 04 January 2025 11:34:27 -0500 (0:00:00.462) 0:01:24.190 ****** FAILED - RETRYING: [managed-node2]: Check web (6 retries left). FAILED - RETRYING: [managed-node2]: Check web (5 retries left). FAILED - RETRYING: [managed-node2]: Check web (4 retries left). FAILED - RETRYING: [managed-node2]: Check web (3 retries left). FAILED - RETRYING: [managed-node2]: Check web (2 retries left). FAILED - RETRYING: [managed-node2]: Check web (1 retries left). fatal: [managed-node2]: FAILED! 
=> { "attempts": 6, "changed": false, "dest": "/run/out", "elapsed": 0, "url": "https://localhost:8000" } MSG: Request failed: TASK [Dump journal] ************************************************************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:142 Saturday 04 January 2025 11:35:01 -0500 (0:00:33.355) 0:01:57.545 ****** fatal: [managed-node2]: FAILED! => { "changed": false, "cmd": [ "journalctl", "-ex" ], "delta": "0:00:00.031582", "end": "2025-01-04 11:35:01.667923", "failed_when_result": true, "rc": 0, "start": "2025-01-04 11:35:01.636341" } STDOUT: Jan 04 11:29:27 managed-node2 groupadd[8883]: new group: name=polkitd, GID=114 Jan 04 11:29:27 managed-node2 useradd[8886]: new user: name=polkitd, UID=114, GID=114, home=/, shell=/sbin/nologin, from=none Jan 04 11:29:27 managed-node2 dbus-broker-launch[633]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 04 11:29:27 managed-node2 dbus-broker-launch[633]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 04 11:29:27 managed-node2 dbus-broker-launch[633]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. 
Jan 04 11:29:27 managed-node2 systemd[1]: Listening on pcscd.socket - PC/SC Smart Card Daemon Activation Socket. ░░ Subject: A start job for unit pcscd.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pcscd.socket has finished successfully. ░░ ░░ The job identifier is 1321. Jan 04 11:29:45 managed-node2 systemd[1]: Started run-r59b0789b29394fbf9755901143f0e79b.service - /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-r59b0789b29394fbf9755901143f0e79b.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-r59b0789b29394fbf9755901143f0e79b.service has finished successfully. ░░ ░░ The job identifier is 1399. Jan 04 11:29:45 managed-node2 systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 1477. Jan 04 11:29:45 managed-node2 systemctl[9578]: Warning: The unit file, source configuration file or drop-ins of man-db-cache-update.service changed on disk. Run 'systemctl daemon-reload' to reload units. Jan 04 11:29:45 managed-node2 systemd[1]: Reload requested from client PID 9581 ('systemctl') (unit session-5.scope)... Jan 04 11:29:45 managed-node2 systemd[1]: Reloading... Jan 04 11:29:45 managed-node2 systemd[1]: Reloading finished in 226 ms. Jan 04 11:29:45 managed-node2 systemd[1]: Queuing reload/restart jobs for marked units… Jan 04 11:29:45 managed-node2 sudo[8756]: pam_unix(sudo:session): session closed for user root Jan 04 11:29:46 managed-node2 python3.12[10171]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:29:47 managed-node2 systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Jan 04 11:29:47 managed-node2 systemd[1]: Finished man-db-cache-update.service. ░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 1477. Jan 04 11:29:47 managed-node2 systemd[1]: run-r59b0789b29394fbf9755901143f0e79b.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-r59b0789b29394fbf9755901143f0e79b.service has successfully entered the 'dead' state. 
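For reference, the quadlet-demo.service unit started earlier in this run is generated by quadlet from /etc/containers/systemd/quadlet-demo.kube (see the SourcePath and FragmentPath values in the Start service output). A quick way to inspect both on the managed node, sketched here rather than taken from the recorded run:

# Show the generated unit that systemd actually loaded
systemctl cat quadlet-demo.service
# Compare against the quadlet source file the role installed
cat /etc/containers/systemd/quadlet-demo.kube
systemctl status quadlet-demo.service --no-pager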
Jan 04 11:29:47 managed-node2 python3.12[10309]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jan 04 11:29:47 managed-node2 python3.12[10444]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:29:49 managed-node2 python3.12[10577]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:29:50 managed-node2 python3.12[10708]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:29:50 managed-node2 python3.12[10839]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 04 11:29:52 managed-node2 python3.12[10971]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 04 11:29:52 managed-node2 python3.12[11104]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 04 11:29:52 managed-node2 systemd[1]: Reload requested from client PID 11107 ('systemctl') (unit session-5.scope)... Jan 04 11:29:52 managed-node2 systemd[1]: Reloading... Jan 04 11:29:52 managed-node2 systemd[1]: Reloading finished in 184 ms. Jan 04 11:29:52 managed-node2 systemd[1]: Starting firewalld.service - firewalld - dynamic firewall daemon... ░░ Subject: A start job for unit firewalld.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit firewalld.service has begun execution. ░░ ░░ The job identifier is 1555. Jan 04 11:29:53 managed-node2 systemd[1]: Started firewalld.service - firewalld - dynamic firewall daemon. ░░ Subject: A start job for unit firewalld.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit firewalld.service has finished successfully. ░░ ░░ The job identifier is 1555. Jan 04 11:29:53 managed-node2 kernel: Warning: Unmaintained driver is detected: ip_set Jan 04 11:29:53 managed-node2 systemd[1]: Starting polkit.service - Authorization Manager... ░░ Subject: A start job for unit polkit.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit polkit.service has begun execution. ░░ ░░ The job identifier is 1639. Jan 04 11:29:53 managed-node2 polkitd[11251]: Started polkitd version 125 Jan 04 11:29:53 managed-node2 systemd[1]: Started polkit.service - Authorization Manager. ░░ Subject: A start job for unit polkit.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit polkit.service has finished successfully. 
░░ ░░ The job identifier is 1639. Jan 04 11:29:53 managed-node2 python3.12[11346]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 04 11:29:55 managed-node2 python3.12[11477]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:29:55 managed-node2 rsyslogd[662]: imjournal: journal files changed, reloading... [v8.2408.0-2.el10 try https://www.rsyslog.com/e/0 ] Jan 04 11:29:55 managed-node2 python3.12[11609]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:29:56 managed-node2 python3.12[11740]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 04 11:29:56 managed-node2 python3.12[11872]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 04 11:29:57 managed-node2 dbus-broker-launch[633]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 04 11:29:57 managed-node2 dbus-broker-launch[633]: Noticed file-system modification, trigger reload. 
░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 04 11:29:57 managed-node2 chronyd[673]: Selected source 10.2.32.38 Jan 04 11:29:57 managed-node2 systemd[1]: Started run-r6693fc22d3104c0892fa7b574ab9dafe.service - /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-r6693fc22d3104c0892fa7b574ab9dafe.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-r6693fc22d3104c0892fa7b574ab9dafe.service has finished successfully. ░░ ░░ The job identifier is 1720. Jan 04 11:29:57 managed-node2 systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 1798. Jan 04 11:29:57 managed-node2 systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Jan 04 11:29:57 managed-node2 systemd[1]: Finished man-db-cache-update.service. ░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 1798. Jan 04 11:29:58 managed-node2 systemd[1]: run-r6693fc22d3104c0892fa7b574ab9dafe.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-r6693fc22d3104c0892fa7b574ab9dafe.service has successfully entered the 'dead' state. Jan 04 11:29:58 managed-node2 python3.12[12012]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Jan 04 11:29:59 managed-node2 python3.12[12174]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jan 04 11:30:00 managed-node2 kernel: SELinux: Converting 475 SID table entries... 
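The local_seport call just above labels ports 15001-15003 as http_port_t. A one-line check on the managed node, sketched here under the assumption that policycoreutils-python-utils (installed earlier in this run) provides semanage:

# The http_port_t line should now list 15001-15003 alongside the stock ports
semanage port -l | grep -w http_port_t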
Jan 04 11:30:00 managed-node2 kernel: SELinux: policy capability network_peer_controls=1 Jan 04 11:30:00 managed-node2 kernel: SELinux: policy capability open_perms=1 Jan 04 11:30:00 managed-node2 kernel: SELinux: policy capability extended_socket_class=1 Jan 04 11:30:00 managed-node2 kernel: SELinux: policy capability always_check_network=0 Jan 04 11:30:00 managed-node2 kernel: SELinux: policy capability cgroup_seclabel=1 Jan 04 11:30:00 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1 Jan 04 11:30:00 managed-node2 kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Jan 04 11:30:00 managed-node2 kernel: SELinux: policy capability ioctl_skip_cloexec=0 Jan 04 11:30:00 managed-node2 kernel: SELinux: policy capability userspace_initial_context=0 Jan 04 11:30:01 managed-node2 python3.12[12309]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jan 04 11:30:05 managed-node2 python3.12[12440]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:30:07 managed-node2 python3.12[12573]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:30:07 managed-node2 python3.12[12704]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:30:08 managed-node2 python3.12[12835]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 04 11:30:08 managed-node2 python3.12[12940]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/nopull.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736008207.803793-8899-200608480777734/.source.yml _original_basename=.e_xekutr follow=False checksum=d5dc917e3cae36de03aa971a17ac473f86fdf934 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:30:08 managed-node2 python3.12[13071]: ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 04 11:30:09 managed-node2 systemd[1]: var-lib-containers-storage-overlay-compat1097166380-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-compat1097166380-merged.mount has successfully entered the 'dead' state. 
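The podman_play invocation recorded above (state=created, kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml) is roughly equivalent to the following CLI call; this is an illustrative sketch, and the exact flags the module passes may differ:

# Create the pod and containers from the kube YAML without starting them
podman kube play --start=false /etc/containers/ansible-kubernetes.d/nopull.yml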
Jan 04 11:30:09 managed-node2 kernel: evm: overlay not supported Jan 04 11:30:09 managed-node2 systemd[1]: var-lib-containers-storage-overlay-metacopy\x2dcheck3977688696-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-metacopy\x2dcheck3977688696-merged.mount has successfully entered the 'dead' state. Jan 04 11:30:09 managed-node2 podman[13078]: 2025-01-04 11:30:09.081625884 -0500 EST m=+0.077816774 system refresh Jan 04 11:30:09 managed-node2 podman[13078]: 2025-01-04 11:30:09.353165704 -0500 EST m=+0.349356676 image build c95b7a4fdf055ab0d873b648e6a8df27e25d358f3b00d002652aee04d59f7994 Jan 04 11:30:09 managed-node2 systemd[1]: Created slice machine.slice - Slice /machine. ░░ Subject: A start job for unit machine.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine.slice has finished successfully. ░░ ░░ The job identifier is 1878. Jan 04 11:30:09 managed-node2 systemd[1]: Created slice machine-libpod_pod_eeb8a75f8fbe95de1e4a52936a1e7002f29f725052fb440dae11e1022ede537e.slice - cgroup machine-libpod_pod_eeb8a75f8fbe95de1e4a52936a1e7002f29f725052fb440dae11e1022ede537e.slice. ░░ Subject: A start job for unit machine-libpod_pod_eeb8a75f8fbe95de1e4a52936a1e7002f29f725052fb440dae11e1022ede537e.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_eeb8a75f8fbe95de1e4a52936a1e7002f29f725052fb440dae11e1022ede537e.slice has finished successfully. ░░ ░░ The job identifier is 1877. Jan 04 11:30:09 managed-node2 podman[13078]: 2025-01-04 11:30:09.405445007 -0500 EST m=+0.401635815 container create ad9f5d01511ef4f51a2b4443b7fc8b9373dfeaba7e7553a2eac52c1034353a2c (image=localhost/podman-pause:5.3.1-1733097600, name=eeb8a75f8fbe-infra, pod_id=eeb8a75f8fbe95de1e4a52936a1e7002f29f725052fb440dae11e1022ede537e, io.buildah.version=1.38.0) Jan 04 11:30:09 managed-node2 podman[13078]: 2025-01-04 11:30:09.411509617 -0500 EST m=+0.407700490 pod create eeb8a75f8fbe95de1e4a52936a1e7002f29f725052fb440dae11e1022ede537e (image=, name=nopull) Jan 04 11:30:11 managed-node2 podman[13078]: 2025-01-04 11:30:11.153913799 -0500 EST m=+2.150104619 container create 5414cafd234a3de364c5bfd5e18232830d93758a7c7fa79d226e3bf8287bb5c4 (image=quay.io/libpod/testimage:20210610, name=nopull-nopull, pod_id=eeb8a75f8fbe95de1e4a52936a1e7002f29f725052fb440dae11e1022ede537e, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z) Jan 04 11:30:11 managed-node2 podman[13078]: 2025-01-04 11:30:11.129113453 -0500 EST m=+2.125304403 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 04 11:30:11 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
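The journal above shows the resulting "nopull" pod and its infra and testimage containers being created but not started. A couple of sketch commands to look at what was created on the managed node:

# List the pod created from nopull.yml
podman pod ps --filter name=nopull
# Show all containers grouped by pod, including created-but-not-running ones
podman ps -a --pod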
Jan 04 11:30:13 managed-node2 python3.12[13408]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:30:15 managed-node2 python3.12[13546]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:30:18 managed-node2 python3.12[13679]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 04 11:30:19 managed-node2 python3.12[13811]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 04 11:30:20 managed-node2 python3.12[13944]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 04 11:30:21 managed-node2 python3.12[14077]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 04 11:30:23 managed-node2 python3.12[14208]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 04 11:30:24 managed-node2 python3.12[14340]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 04 11:30:25 managed-node2 python3.12[14472]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 
fact_path=/etc/ansible/facts.d Jan 04 11:30:26 managed-node2 python3.12[14632]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jan 04 11:30:27 managed-node2 python3.12[14763]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jan 04 11:30:32 managed-node2 python3.12[14894]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:30:34 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 04 11:30:34 managed-node2 podman[15036]: 2025-01-04 11:30:34.875969715 -0500 EST m=+0.248379560 image pull-error quay.io/linux-system-roles/this_is_a_bogus_image:latest initializing source docker://quay.io/linux-system-roles/this_is_a_bogus_image:latest: reading manifest latest in quay.io/linux-system-roles/this_is_a_bogus_image: unauthorized: access to the requested resource is not authorized Jan 04 11:30:34 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 04 11:30:35 managed-node2 python3.12[15174]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:30:35 managed-node2 python3.12[15305]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:30:36 managed-node2 python3.12[15436]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 04 11:30:36 managed-node2 python3.12[15541]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/bogus.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736008235.9242928-10201-227096276187423/.source.yml _original_basename=.kuulbywm follow=False checksum=f8266a972ed3be7e204d2a67883fe3a22b8dbf18 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:30:37 managed-node2 python3.12[15672]: ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None 
quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 04 11:30:37 managed-node2 systemd[1]: Created slice machine-libpod_pod_b735ccd770a976ee1d5bfed7507f82931eb97eb2293bcb7906d1381bcdf6ee80.slice - cgroup machine-libpod_pod_b735ccd770a976ee1d5bfed7507f82931eb97eb2293bcb7906d1381bcdf6ee80.slice. ░░ Subject: A start job for unit machine-libpod_pod_b735ccd770a976ee1d5bfed7507f82931eb97eb2293bcb7906d1381bcdf6ee80.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_b735ccd770a976ee1d5bfed7507f82931eb97eb2293bcb7906d1381bcdf6ee80.slice has finished successfully. ░░ ░░ The job identifier is 1883. Jan 04 11:30:37 managed-node2 podman[15679]: 2025-01-04 11:30:37.125248133 -0500 EST m=+0.059771611 container create 728b039efd0cd9f3735ece5f95e29b4bb1991668c1f425704bf5c6a903978edf (image=localhost/podman-pause:5.3.1-1733097600, name=b735ccd770a9-infra, pod_id=b735ccd770a976ee1d5bfed7507f82931eb97eb2293bcb7906d1381bcdf6ee80, io.buildah.version=1.38.0) Jan 04 11:30:37 managed-node2 podman[15679]: 2025-01-04 11:30:37.131555918 -0500 EST m=+0.066079366 pod create b735ccd770a976ee1d5bfed7507f82931eb97eb2293bcb7906d1381bcdf6ee80 (image=, name=bogus) Jan 04 11:30:37 managed-node2 podman[15679]: 2025-01-04 11:30:37.385607233 -0500 EST m=+0.320130694 image pull-error quay.io/linux-system-roles/this_is_a_bogus_image:latest initializing source docker://quay.io/linux-system-roles/this_is_a_bogus_image:latest: reading manifest latest in quay.io/linux-system-roles/this_is_a_bogus_image: unauthorized: access to the requested resource is not authorized Jan 04 11:30:37 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
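The containers.podman.podman_play call logged above runs with state=created, so the bogus pod is created even though pulling quay.io/linux-system-roles/this_is_a_bogus_image:latest fails with an authorization error. A hedged sketch of an equivalent task, using only the parameters visible in the logged invocation:

# Sketch of an equivalent task; parameter values taken from the invocation logged above
- name: Create the pod from the kube file without starting it
  containers.podman.podman_play:
    kube_file: /etc/containers/ansible-kubernetes.d/bogus.yml
    state: created
    executable: podman
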
Jan 04 11:30:40 managed-node2 python3.12[15949]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:30:41 managed-node2 python3.12[16086]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:30:44 managed-node2 python3.12[16219]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 04 11:30:45 managed-node2 python3.12[16351]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 04 11:30:46 managed-node2 python3.12[16484]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 04 11:30:46 managed-node2 python3.12[16617]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 04 11:30:48 managed-node2 python3.12[16748]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 04 11:30:49 managed-node2 python3.12[16880]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 04 11:30:50 managed-node2 python3.12[17012]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 
fact_path=/etc/ansible/facts.d Jan 04 11:30:52 managed-node2 python3.12[17172]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jan 04 11:30:52 managed-node2 python3.12[17303]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jan 04 11:30:57 managed-node2 python3.12[17434]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:30:59 managed-node2 python3.12[17567]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/nopull.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:30:59 managed-node2 python3.12[17699]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-nopull.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None Jan 04 11:31:00 managed-node2 python3.12[17832]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:31:00 managed-node2 python3.12[17965]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 04 11:31:00 managed-node2 python3.12[17965]: ansible-containers.podman.podman_play version: 5.3.1, kube file /etc/containers/ansible-kubernetes.d/nopull.yml Jan 04 11:31:00 managed-node2 podman[17972]: 2025-01-04 11:31:00.839724776 -0500 EST m=+0.024874940 pod stop eeb8a75f8fbe95de1e4a52936a1e7002f29f725052fb440dae11e1022ede537e (image=, name=nopull) Jan 04 11:31:00 managed-node2 systemd[1]: Removed slice machine-libpod_pod_eeb8a75f8fbe95de1e4a52936a1e7002f29f725052fb440dae11e1022ede537e.slice - cgroup machine-libpod_pod_eeb8a75f8fbe95de1e4a52936a1e7002f29f725052fb440dae11e1022ede537e.slice. ░░ Subject: A stop job for unit machine-libpod_pod_eeb8a75f8fbe95de1e4a52936a1e7002f29f725052fb440dae11e1022ede537e.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_eeb8a75f8fbe95de1e4a52936a1e7002f29f725052fb440dae11e1022ede537e.slice has finished. ░░ ░░ The job identifier is 1889 and the job result is done. 
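The teardown sequence above first derives the per-file unit name with systemd-escape --template podman-kube@.service, then stops and disables that unit (podman-kube@-etc-containers-ansible\x2dkubernetes.d-nopull.yml.service) before removing the play. A sketch of those two steps as tasks; __kube_file and __unit_name are placeholder variable names, not the role's real ones:

# Illustrative sketch; variable names are placeholders
- name: Compute the podman-kube@ unit name for the kube file
  ansible.builtin.command: systemd-escape --template podman-kube@.service {{ __kube_file }}
  register: __unit_name
  changed_when: false

- name: Stop and disable the per-file podman-kube unit
  ansible.builtin.systemd:
    name: "{{ __unit_name.stdout }}"
    state: stopped
    enabled: false
    scope: system
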
Jan 04 11:31:00 managed-node2 podman[17972]: 2025-01-04 11:31:00.882910761 -0500 EST m=+0.068060542 container remove 5414cafd234a3de364c5bfd5e18232830d93758a7c7fa79d226e3bf8287bb5c4 (image=quay.io/libpod/testimage:20210610, name=nopull-nopull, pod_id=eeb8a75f8fbe95de1e4a52936a1e7002f29f725052fb440dae11e1022ede537e, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test) Jan 04 11:31:00 managed-node2 podman[17972]: 2025-01-04 11:31:00.90852606 -0500 EST m=+0.093675841 container remove ad9f5d01511ef4f51a2b4443b7fc8b9373dfeaba7e7553a2eac52c1034353a2c (image=localhost/podman-pause:5.3.1-1733097600, name=eeb8a75f8fbe-infra, pod_id=eeb8a75f8fbe95de1e4a52936a1e7002f29f725052fb440dae11e1022ede537e, io.buildah.version=1.38.0) Jan 04 11:31:00 managed-node2 podman[17972]: 2025-01-04 11:31:00.917819107 -0500 EST m=+0.102968855 pod remove eeb8a75f8fbe95de1e4a52936a1e7002f29f725052fb440dae11e1022ede537e (image=, name=nopull) Jan 04 11:31:00 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 04 11:31:01 managed-node2 python3.12[18111]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:31:01 managed-node2 python3.12[18242]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:31:01 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
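After the pod is removed, the cleanup above deletes the kube spec and prunes unused images. A minimal sketch mirroring the two logged invocations:

# Mirrors the ansible-file and 'podman image prune -f' calls logged above
- name: Remove the kube spec file
  ansible.builtin.file:
    path: /etc/containers/ansible-kubernetes.d/nopull.yml
    state: absent

- name: Prune unused images
  ansible.builtin.command: podman image prune -f
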
Jan 04 11:31:05 managed-node2 python3.12[18512]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:31:06 managed-node2 python3.12[18649]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:31:10 managed-node2 python3.12[18782]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 04 11:31:11 managed-node2 python3.12[18914]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 04 11:31:12 managed-node2 python3.12[19047]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 04 11:31:13 managed-node2 python3.12[19180]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 04 11:31:15 managed-node2 python3.12[19311]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 04 11:31:16 managed-node2 python3.12[19443]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 04 11:31:17 managed-node2 python3.12[19575]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 
fact_path=/etc/ansible/facts.d Jan 04 11:31:18 managed-node2 python3.12[19735]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jan 04 11:31:19 managed-node2 python3.12[19866]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jan 04 11:31:24 managed-node2 python3.12[19997]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:31:26 managed-node2 python3.12[20130]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/bogus.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:31:26 managed-node2 python3.12[20262]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-bogus.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None Jan 04 11:31:27 managed-node2 python3.12[20395]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:31:27 managed-node2 python3.12[20528]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 04 11:31:27 managed-node2 python3.12[20528]: ansible-containers.podman.podman_play version: 5.3.1, kube file /etc/containers/ansible-kubernetes.d/bogus.yml Jan 04 11:31:27 managed-node2 podman[20535]: 2025-01-04 11:31:27.979503036 -0500 EST m=+0.027514865 pod stop b735ccd770a976ee1d5bfed7507f82931eb97eb2293bcb7906d1381bcdf6ee80 (image=, name=bogus) Jan 04 11:31:27 managed-node2 systemd[1]: Removed slice machine-libpod_pod_b735ccd770a976ee1d5bfed7507f82931eb97eb2293bcb7906d1381bcdf6ee80.slice - cgroup machine-libpod_pod_b735ccd770a976ee1d5bfed7507f82931eb97eb2293bcb7906d1381bcdf6ee80.slice. ░░ Subject: A stop job for unit machine-libpod_pod_b735ccd770a976ee1d5bfed7507f82931eb97eb2293bcb7906d1381bcdf6ee80.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_b735ccd770a976ee1d5bfed7507f82931eb97eb2293bcb7906d1381bcdf6ee80.slice has finished. ░░ ░░ The job identifier is 1891 and the job result is done. 
Jan 04 11:31:28 managed-node2 podman[20535]: 2025-01-04 11:31:28.017511925 -0500 EST m=+0.065523582 container remove 728b039efd0cd9f3735ece5f95e29b4bb1991668c1f425704bf5c6a903978edf (image=localhost/podman-pause:5.3.1-1733097600, name=b735ccd770a9-infra, pod_id=b735ccd770a976ee1d5bfed7507f82931eb97eb2293bcb7906d1381bcdf6ee80, io.buildah.version=1.38.0) Jan 04 11:31:28 managed-node2 podman[20535]: 2025-01-04 11:31:28.026984016 -0500 EST m=+0.074995653 pod remove b735ccd770a976ee1d5bfed7507f82931eb97eb2293bcb7906d1381bcdf6ee80 (image=, name=bogus) Jan 04 11:31:28 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 04 11:31:28 managed-node2 python3.12[20675]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:31:29 managed-node2 python3.12[20806]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:31:29 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
Jan 04 11:31:32 managed-node2 python3.12[21076]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:31:33 managed-node2 python3.12[21213]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:31:36 managed-node2 python3.12[21346]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 04 11:31:37 managed-node2 python3.12[21478]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 04 11:31:38 managed-node2 python3.12[21611]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 04 11:31:39 managed-node2 python3.12[21744]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 04 11:31:41 managed-node2 python3.12[21875]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 04 11:31:42 managed-node2 python3.12[22007]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 04 11:31:43 managed-node2 python3.12[22139]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 
fact_path=/etc/ansible/facts.d Jan 04 11:31:45 managed-node2 python3.12[22299]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jan 04 11:31:46 managed-node2 python3.12[22430]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jan 04 11:31:51 managed-node2 python3.12[22561]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None Jan 04 11:31:52 managed-node2 python3.12[22693]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:31:52 managed-node2 python3.12[22826]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:31:53 managed-node2 python3.12[22958]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:31:54 managed-node2 python3.12[23090]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:31:55 managed-node2 python3.12[23222]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl enable-linger podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jan 04 11:31:55 managed-node2 systemd[1]: Created slice user-3001.slice - User Slice of UID 3001. ░░ Subject: A start job for unit user-3001.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-3001.slice has finished successfully. ░░ ░░ The job identifier is 1894. Jan 04 11:31:55 managed-node2 systemd[1]: Starting user-runtime-dir@3001.service - User Runtime Directory /run/user/3001... ░░ Subject: A start job for unit user-runtime-dir@3001.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@3001.service has begun execution. ░░ ░░ The job identifier is 1893. Jan 04 11:31:55 managed-node2 systemd[1]: Finished user-runtime-dir@3001.service - User Runtime Directory /run/user/3001. ░░ Subject: A start job for unit user-runtime-dir@3001.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@3001.service has finished successfully. ░░ ░░ The job identifier is 1893. Jan 04 11:31:55 managed-node2 systemd[1]: Starting user@3001.service - User Manager for UID 3001... ░░ Subject: A start job for unit user@3001.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@3001.service has begun execution. ░░ ░░ The job identifier is 1973. 
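The sequence above prepares the rootless user: getsubids and getsubids -g confirm that podman_basic_user has sub-UID and sub-GID ranges, and loginctl enable-linger (guarded by creates=) lets user@3001.service keep running without an active login. A sketch of the lingering step, taken directly from the logged command:

# Based on the 'loginctl enable-linger' command and creates= guard logged above
- name: Enable lingering for the rootless podman user
  ansible.builtin.command: loginctl enable-linger podman_basic_user
  args:
    creates: /var/lib/systemd/linger/podman_basic_user
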
Jan 04 11:31:55 managed-node2 systemd-logind[663]: New session 6 of user podman_basic_user. ░░ Subject: A new session 6 has been created for user podman_basic_user ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 6 has been created for the user podman_basic_user. ░░ ░░ The leading process of the session is 23226. Jan 04 11:31:55 managed-node2 (systemd)[23226]: pam_unix(systemd-user:session): session opened for user podman_basic_user(uid=3001) by podman_basic_user(uid=0) Jan 04 11:31:55 managed-node2 systemd[23226]: Queued start job for default target default.target. Jan 04 11:31:55 managed-node2 systemd[23226]: Created slice app.slice - User Application Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 10. Jan 04 11:31:55 managed-node2 systemd[23226]: Started grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 6. Jan 04 11:31:55 managed-node2 systemd[23226]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 4. Jan 04 11:31:55 managed-node2 systemd[23226]: Reached target paths.target - Paths. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 7. Jan 04 11:31:55 managed-node2 systemd[23226]: Reached target timers.target - Timers. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 3. Jan 04 11:31:55 managed-node2 systemd[23226]: Starting dbus.socket - D-Bus User Message Bus Socket... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 9. Jan 04 11:31:55 managed-node2 systemd[23226]: Starting systemd-tmpfiles-setup.service - Create User Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 12. Jan 04 11:31:55 managed-node2 systemd[23226]: Finished systemd-tmpfiles-setup.service - Create User Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 12. Jan 04 11:31:55 managed-node2 systemd[23226]: Listening on dbus.socket - D-Bus User Message Bus Socket. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 9. Jan 04 11:31:55 managed-node2 systemd[23226]: Reached target sockets.target - Sockets. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 8. Jan 04 11:31:55 managed-node2 systemd[23226]: Reached target basic.target - Basic System. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 2. Jan 04 11:31:55 managed-node2 systemd[23226]: Reached target default.target - Main User Target. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 1. Jan 04 11:31:55 managed-node2 systemd[23226]: Startup finished in 70ms. ░░ Subject: User manager start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The user manager instance for user 3001 has been started. All services queued ░░ for starting have been started. Note that other services might still be starting ░░ up or be started at any later time. ░░ ░░ Startup of the manager took 70774 microseconds. Jan 04 11:31:55 managed-node2 systemd[1]: Started user@3001.service - User Manager for UID 3001. ░░ Subject: A start job for unit user@3001.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@3001.service has finished successfully. ░░ ░░ The job identifier is 1973. Jan 04 11:31:56 managed-node2 python3.12[23372]: ansible-file Invoked with path=/tmp/lsr_83t3r8ir_podman/httpd1 state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:31:56 managed-node2 python3.12[23503]: ansible-file Invoked with path=/tmp/lsr_83t3r8ir_podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:31:57 managed-node2 sudo[23676]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ukvatkplbrcszidibphkwttlsgmqtdhx ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736008317.157093-14273-171456620512807/AnsiballZ_podman_image.py' Jan 04 11:31:57 managed-node2 sudo[23676]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-23676) opened. 
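The sudo entries above show how the podman_image module is executed as podman_basic_user with XDG_RUNTIME_DIR=/run/user/3001 exported, so rootless podman uses that user's runtime directory and user manager. A hedged sketch of how such a task might look; the image name is a placeholder, since the image pulled at this point is not identified in this part of the log:

# Illustrative sketch only; the image name is a placeholder
- name: Pull an image as the rootless user
  containers.podman.podman_image:
    name: quay.io/libpod/testimage:20210610
  become: true
  become_user: podman_basic_user
  environment:
    XDG_RUNTIME_DIR: /run/user/3001
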
Jan 04 11:31:57 managed-node2 sudo[23676]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jan 04 11:31:57 managed-node2 systemd[23226]: Created slice session.slice - User Core Session Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 18. Jan 04 11:31:57 managed-node2 systemd[23226]: Starting dbus-broker.service - D-Bus User Message Bus... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 13. Jan 04 11:31:57 managed-node2 dbus-broker-launch[23700]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Jan 04 11:31:57 managed-node2 dbus-broker-launch[23700]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Jan 04 11:31:57 managed-node2 systemd[23226]: Started dbus-broker.service - D-Bus User Message Bus. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. Jan 04 11:31:57 managed-node2 dbus-broker-launch[23700]: Ready Jan 04 11:31:57 managed-node2 systemd[23226]: Created slice user.slice - Slice /user. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 20. Jan 04 11:31:57 managed-node2 systemd[23226]: Started podman-23686.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 19. Jan 04 11:31:57 managed-node2 systemd[23226]: Started podman-pause-020c7518.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 23. Jan 04 11:31:57 managed-node2 systemd[23226]: Started podman-23702.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 27. Jan 04 11:31:58 managed-node2 systemd[23226]: Started podman-23727.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 31. 
Jan 04 11:31:58 managed-node2 sudo[23676]: pam_unix(sudo:session): session closed for user podman_basic_user Jan 04 11:31:59 managed-node2 python3.12[23864]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:31:59 managed-node2 python3.12[23995]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d state=directory owner=podman_basic_user group=3001 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:31:59 managed-node2 python3.12[24126]: ansible-ansible.legacy.stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 04 11:32:00 managed-node2 python3.12[24231]: ansible-ansible.legacy.copy Invoked with dest=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml owner=podman_basic_user group=3001 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736008319.6432922-14384-218752844033904/.source.yml _original_basename=.pixe7uee follow=False checksum=1a8262735b4b21563f85a46c73b644d4dc2b00fd backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:32:00 managed-node2 sudo[24404]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rxnpeowjkhqgrhucbnhrgolbojsqpxcl ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736008320.3666356-14423-205375948089438/AnsiballZ_podman_play.py' Jan 04 11:32:00 managed-node2 sudo[24404]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-24404) opened. Jan 04 11:32:00 managed-node2 sudo[24404]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jan 04 11:32:00 managed-node2 python3.12[24407]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 04 11:32:00 managed-node2 systemd[23226]: Started podman-24414.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 35. Jan 04 11:32:01 managed-node2 systemd[23226]: Created slice user-libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e.slice - cgroup user-libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e.slice. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 39. Jan 04 11:32:01 managed-node2 kernel: tun: Universal TUN/TAP device driver, 1.6 Jan 04 11:32:01 managed-node2 systemd[23226]: Started rootless-netns-1c449f92.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 43. Jan 04 11:32:01 managed-node2 kernel: bridge: filtering via arp/ip/ip6tables is no longer available by default. Update your scripts to load br_netfilter if you need this. Jan 04 11:32:01 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 04 11:32:01 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 04 11:32:01 managed-node2 kernel: veth0: entered allmulticast mode Jan 04 11:32:01 managed-node2 kernel: veth0: entered promiscuous mode Jan 04 11:32:01 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 04 11:32:01 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Jan 04 11:32:01 managed-node2 systemd[23226]: Started run-r24cb90d5c5ab4923b0b6b9050b5850e9.scope - /usr/libexec/podman/aardvark-dns --config /run/user/3001/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 47. Jan 04 11:32:01 managed-node2 aardvark-dns[24497]: starting aardvark on a child with pid 24498 Jan 04 11:32:01 managed-node2 aardvark-dns[24498]: Successfully parsed config Jan 04 11:32:01 managed-node2 aardvark-dns[24498]: Listen v4 ip {"podman-default-kube-network": [10.89.0.1]} Jan 04 11:32:01 managed-node2 aardvark-dns[24498]: Listen v6 ip {} Jan 04 11:32:01 managed-node2 aardvark-dns[24498]: Using the following upstream servers: [169.254.1.1:53, 10.29.169.13:53, 10.29.170.12:53] Jan 04 11:32:01 managed-node2 conmon[24515]: conmon 219b809a67b76f01c3e3 : failed to write to /proc/self/oom_score_adj: Permission denied Jan 04 11:32:01 managed-node2 systemd[23226]: Started libpod-conmon-219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 51. Jan 04 11:32:01 managed-node2 conmon[24516]: conmon 219b809a67b76f01c3e3 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/14/attach} Jan 04 11:32:01 managed-node2 conmon[24516]: conmon 219b809a67b76f01c3e3 : terminal_ctrl_fd: 14 Jan 04 11:32:01 managed-node2 conmon[24516]: conmon 219b809a67b76f01c3e3 : winsz read side: 17, winsz write side: 18 Jan 04 11:32:01 managed-node2 systemd[23226]: Started libpod-219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 56. 
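At this point the rootless network stack is up: the podman1 bridge and veth0 exist, and aardvark-dns serves podman-default-kube-network on 10.89.0.1. If verification were needed, the network could be inspected as the same user; a sketch, assuming only the network name shown in the aardvark-dns messages above:

# Optional verification sketch; 'podman network inspect' run as the rootless user
- name: Inspect the rootless kube network
  ansible.builtin.command: podman network inspect podman-default-kube-network
  become: true
  become_user: podman_basic_user
  environment:
    XDG_RUNTIME_DIR: /run/user/3001
  register: __net_info
  changed_when: false
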
Jan 04 11:32:01 managed-node2 conmon[24516]: conmon 219b809a67b76f01c3e3 : container PID: 24518 Jan 04 11:32:01 managed-node2 conmon[24520]: conmon a6beb024b5d86fca9df6 : failed to write to /proc/self/oom_score_adj: Permission denied Jan 04 11:32:01 managed-node2 systemd[23226]: Started libpod-conmon-a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 61. Jan 04 11:32:01 managed-node2 conmon[24521]: conmon a6beb024b5d86fca9df6 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/13/attach} Jan 04 11:32:01 managed-node2 conmon[24521]: conmon a6beb024b5d86fca9df6 : terminal_ctrl_fd: 13 Jan 04 11:32:01 managed-node2 conmon[24521]: conmon a6beb024b5d86fca9df6 : winsz read side: 16, winsz write side: 17 Jan 04 11:32:01 managed-node2 systemd[23226]: Started libpod-a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 66. Jan 04 11:32:01 managed-node2 conmon[24521]: conmon a6beb024b5d86fca9df6 : container PID: 24523 Jan 04 11:32:01 managed-node2 python3.12[24407]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml Jan 04 11:32:01 managed-node2 python3.12[24407]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod: 4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e Container: a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079 Jan 04 11:32:01 managed-node2 python3.12[24407]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2025-01-04T11:32:00-05:00" level=info msg="/bin/podman filtering at log level debug" time="2025-01-04T11:32:00-05:00" level=debug msg="Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2025-01-04T11:32:00-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2025-01-04T11:32:00-05:00" level=info msg="Using sqlite as database backend" time="2025-01-04T11:32:00-05:00" level=debug msg="systemd-logind: Unknown object '/'." 
time="2025-01-04T11:32:00-05:00" level=debug msg="Using graph driver overlay" time="2025-01-04T11:32:00-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" time="2025-01-04T11:32:00-05:00" level=debug msg="Using run root /run/user/3001/containers" time="2025-01-04T11:32:00-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" time="2025-01-04T11:32:00-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" time="2025-01-04T11:32:00-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" time="2025-01-04T11:32:00-05:00" level=debug msg="Using transient store: false" time="2025-01-04T11:32:00-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2025-01-04T11:32:00-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-01-04T11:32:00-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-01-04T11:32:00-05:00" level=debug msg="Cached value indicated that metacopy is not being used" time="2025-01-04T11:32:00-05:00" level=debug msg="Cached value indicated that native-diff is usable" time="2025-01-04T11:32:00-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" time="2025-01-04T11:32:00-05:00" level=debug msg="Initializing event backend file" time="2025-01-04T11:32:00-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2025-01-04T11:32:00-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" time="2025-01-04T11:32:00-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2025-01-04T11:32:00-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" time="2025-01-04T11:32:00-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2025-01-04T11:32:00-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2025-01-04T11:32:00-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2025-01-04T11:32:00-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2025-01-04T11:32:00-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2025-01-04T11:32:00-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" time="2025-01-04T11:32:00-05:00" level=info msg="Setting parallel job count to 7" time="2025-01-04T11:32:00-05:00" level=debug msg="Successfully loaded 1 networks" time="2025-01-04T11:32:00-05:00" level=debug msg="found free device name podman1" time="2025-01-04T11:32:00-05:00" level=debug msg="found free ipv4 network subnet 10.89.0.0/24" time="2025-01-04T11:32:00-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" 
time="2025-01-04T11:32:00-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-04T11:32:00-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." time="2025-01-04T11:32:00-05:00" level=debug msg="reference \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\" does not resolve to an image ID" time="2025-01-04T11:32:00-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." time="2025-01-04T11:32:00-05:00" level=debug msg="reference \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\" does not resolve to an image ID" time="2025-01-04T11:32:00-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." time="2025-01-04T11:32:00-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-04T11:32:00-05:00" level=debug msg="FROM \"scratch\"" time="2025-01-04T11:32:00-05:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are not supported" time="2025-01-04T11:32:00-05:00" level=debug msg="Check for idmapped mounts support " time="2025-01-04T11:32:00-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-04T11:32:00-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-04T11:32:00-05:00" level=debug msg="overlay: test mount indicated that volatile is being used" time="2025-01-04T11:32:00-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/aa04371c3e894aed357c6cfc2570aff6fb7b7d23c9c6cc393c095fa95a1d69f0/empty,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/aa04371c3e894aed357c6cfc2570aff6fb7b7d23c9c6cc393c095fa95a1d69f0/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/aa04371c3e894aed357c6cfc2570aff6fb7b7d23c9c6cc393c095fa95a1d69f0/work,userxattr,volatile,context=\"system_u:object_r:container_file_t:s0:c738,c970\"" time="2025-01-04T11:32:00-05:00" level=debug msg="Container ID: 13148fc10a35a48704ad04e509034933e5e7ceb07c1ddfb356ef2b60b21e5abf" time="2025-01-04T11:32:00-05:00" level=debug msg="Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:copy Args:[/usr/libexec/podman/catatonit /catatonit] Flags:[] Attrs:map[] Message:COPY /usr/libexec/podman/catatonit /catatonit Heredocs:[] Original:COPY /usr/libexec/podman/catatonit /catatonit}" time="2025-01-04T11:32:00-05:00" level=debug msg="COPY []string(nil), imagebuilder.Copy{FromFS:false, From:\"\", Src:[]string{\"/usr/libexec/podman/catatonit\"}, Dest:\"/catatonit\", Download:false, Chown:\"\", Chmod:\"\", Checksum:\"\", Files:[]imagebuilder.File(nil), KeepGitDir:false, Link:false, Parents:false, Excludes:[]string(nil)}" time="2025-01-04T11:32:00-05:00" level=debug msg="EnsureContainerPath \"/\" (owner \"\", mode 0) in \"13148fc10a35a48704ad04e509034933e5e7ceb07c1ddfb356ef2b60b21e5abf\"" time="2025-01-04T11:32:00-05:00" level=debug msg="added content file:872045bc026632654869cebc14f91fe531969760bf26925e889d9b70527c8b67" time="2025-01-04T11:32:00-05:00" level=debug msg="Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:entrypoint Args:[/catatonit -P] Flags:[] Attrs:map[json:true] Message:ENTRYPOINT /catatonit -P Heredocs:[] Original:ENTRYPOINT 
[\"/catatonit\", \"-P\"]}" time="2025-01-04T11:32:00-05:00" level=debug msg="EnsureContainerPath \"/\" (owner \"\", mode 0) in \"13148fc10a35a48704ad04e509034933e5e7ceb07c1ddfb356ef2b60b21e5abf\"" time="2025-01-04T11:32:00-05:00" level=debug msg="COMMIT localhost/podman-pause:5.3.1-1733097600" time="2025-01-04T11:32:00-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\"" time="2025-01-04T11:32:00-05:00" level=debug msg="COMMIT \"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\"" time="2025-01-04T11:32:00-05:00" level=debug msg="committing image with reference \"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\" is allowed by policy" time="2025-01-04T11:32:00-05:00" level=debug msg="layer list: [\"aa04371c3e894aed357c6cfc2570aff6fb7b7d23c9c6cc393c095fa95a1d69f0\"]" time="2025-01-04T11:32:00-05:00" level=debug msg="using \"/var/tmp/buildah1591127554\" to hold temporary data" time="2025-01-04T11:32:00-05:00" level=debug msg="Tar with options on /home/podman_basic_user/.local/share/containers/storage/overlay/aa04371c3e894aed357c6cfc2570aff6fb7b7d23c9c6cc393c095fa95a1d69f0/diff" time="2025-01-04T11:32:00-05:00" level=debug msg="layer \"aa04371c3e894aed357c6cfc2570aff6fb7b7d23c9c6cc393c095fa95a1d69f0\" size is 699392 bytes, uncompressed digest sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6, possibly-compressed digest sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6" time="2025-01-04T11:32:00-05:00" level=debug msg="OCIv1 config = {\"created\":\"2025-01-04T16:32:00.970143613Z\",\"architecture\":\"amd64\",\"os\":\"linux\",\"config\":{\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Entrypoint\":[\"/catatonit\",\"-P\"],\"WorkingDir\":\"/\",\"Labels\":{\"io.buildah.version\":\"1.38.0\"}},\"rootfs\":{\"type\":\"layers\",\"diff_ids\":[\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"]},\"history\":[{\"created\":\"2025-01-04T16:32:00.943891967Z\",\"created_by\":\"/bin/sh -c #(nop) COPY file:872045bc026632654869cebc14f91fe531969760bf26925e889d9b70527c8b67 in /catatonit \",\"empty_layer\":true},{\"created\":\"2025-01-04T16:32:00.973741253Z\",\"created_by\":\"/bin/sh -c #(nop) ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]\"}]}" time="2025-01-04T11:32:00-05:00" level=debug msg="OCIv1 manifest = {\"schemaVersion\":2,\"mediaType\":\"application/vnd.oci.image.manifest.v1+json\",\"config\":{\"mediaType\":\"application/vnd.oci.image.config.v1+json\",\"digest\":\"sha256:0cec0ba6b70ee34915ec9825ae81de8f9e0bc64dc0c543c9eb8094c2d1a78cd0\",\"size\":685},\"layers\":[{\"mediaType\":\"application/vnd.oci.image.layer.v1.tar\",\"digest\":\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\",\"size\":699392}],\"annotations\":{\"org.opencontainers.image.base.digest\":\"\",\"org.opencontainers.image.base.name\":\"\"}}" time="2025-01-04T11:32:00-05:00" level=debug msg="Docker v2s2 config = 
{\"created\":\"2025-01-04T16:32:00.970143613Z\",\"container\":\"13148fc10a35a48704ad04e509034933e5e7ceb07c1ddfb356ef2b60b21e5abf\",\"container_config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":null,\"Image\":\"\",\"Volumes\":{},\"WorkingDir\":\"/\",\"Entrypoint\":[\"/catatonit\",\"-P\"],\"OnBuild\":[],\"Labels\":{\"io.buildah.version\":\"1.38.0\"}},\"config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":null,\"Image\":\"\",\"Volumes\":{},\"WorkingDir\":\"/\",\"Entrypoint\":[\"/catatonit\",\"-P\"],\"OnBuild\":[],\"Labels\":{\"io.buildah.version\":\"1.38.0\"}},\"architecture\":\"amd64\",\"os\":\"linux\",\"rootfs\":{\"type\":\"layers\",\"diff_ids\":[\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"]},\"history\":[{\"created\":\"2025-01-04T16:32:00.943891967Z\",\"created_by\":\"/bin/sh -c #(nop) COPY file:872045bc026632654869cebc14f91fe531969760bf26925e889d9b70527c8b67 in /catatonit \",\"empty_layer\":true},{\"created\":\"2025-01-04T16:32:00.973741253Z\",\"created_by\":\"/bin/sh -c #(nop) ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]\"}]}" time="2025-01-04T11:32:00-05:00" level=debug msg="Docker v2s2 manifest = {\"schemaVersion\":2,\"mediaType\":\"application/vnd.docker.distribution.manifest.v2+json\",\"config\":{\"mediaType\":\"application/vnd.docker.container.image.v1+json\",\"size\":1348,\"digest\":\"sha256:c243c799d4fe34f33faeabe031c005a690cfa802239305f6e8f44718b82b5be0\"},\"layers\":[{\"mediaType\":\"application/vnd.docker.image.rootfs.diff.tar\",\"size\":699392,\"digest\":\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"}]}" time="2025-01-04T11:32:00-05:00" level=debug msg="Using SQLite blob info cache at /home/podman_basic_user/.local/share/containers/cache/blob-info-cache-v1.sqlite" time="2025-01-04T11:32:00-05:00" level=debug msg="IsRunningImageAllowed for image containers-storage:" time="2025-01-04T11:32:00-05:00" level=debug msg=" Using transport \"containers-storage\" policy section \"\"" time="2025-01-04T11:32:00-05:00" level=debug msg=" Requirement 0: allowed" time="2025-01-04T11:32:00-05:00" level=debug msg="Overall: allowed" time="2025-01-04T11:32:00-05:00" level=debug msg="start reading config" time="2025-01-04T11:32:00-05:00" level=debug msg="finished reading config" time="2025-01-04T11:32:00-05:00" level=debug msg="Manifest has MIME type application/vnd.oci.image.manifest.v1+json, ordered candidate list [application/vnd.oci.image.manifest.v1+json, application/vnd.docker.distribution.manifest.v2+json, application/vnd.docker.distribution.manifest.v1+prettyjws, application/vnd.docker.distribution.manifest.v1+json]" time="2025-01-04T11:32:00-05:00" level=debug msg="... 
will first try using the original manifest unmodified" time="2025-01-04T11:32:00-05:00" level=debug msg="Checking if we can reuse blob sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6: general substitution = true, compression for MIME type \"application/vnd.oci.image.layer.v1.tar\" = true" time="2025-01-04T11:32:00-05:00" level=debug msg="reading layer \"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"" time="2025-01-04T11:32:00-05:00" level=debug msg="No compression detected" time="2025-01-04T11:32:00-05:00" level=debug msg="Using original blob without modification" time="2025-01-04T11:32:00-05:00" level=debug msg="Applying tar in /home/podman_basic_user/.local/share/containers/storage/overlay/83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6/diff" time="2025-01-04T11:32:01-05:00" level=debug msg="finished reading layer \"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"" time="2025-01-04T11:32:01-05:00" level=debug msg="No compression detected" time="2025-01-04T11:32:01-05:00" level=debug msg="Compression change for blob sha256:0cec0ba6b70ee34915ec9825ae81de8f9e0bc64dc0c543c9eb8094c2d1a78cd0 (\"application/vnd.oci.image.config.v1+json\") not supported" time="2025-01-04T11:32:01-05:00" level=debug msg="Using original blob without modification" time="2025-01-04T11:32:01-05:00" level=debug msg="setting image creation date to 2025-01-04 16:32:00.970143613 +0000 UTC" time="2025-01-04T11:32:01-05:00" level=debug msg="created new image ID \"0cec0ba6b70ee34915ec9825ae81de8f9e0bc64dc0c543c9eb8094c2d1a78cd0\" with metadata \"{}\"" time="2025-01-04T11:32:01-05:00" level=debug msg="added name \"localhost/podman-pause:5.3.1-1733097600\" to image \"0cec0ba6b70ee34915ec9825ae81de8f9e0bc64dc0c543c9eb8094c2d1a78cd0\"" time="2025-01-04T11:32:01-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\"" time="2025-01-04T11:32:01-05:00" level=debug msg="printing final image id \"0cec0ba6b70ee34915ec9825ae81de8f9e0bc64dc0c543c9eb8094c2d1a78cd0\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Pod using bridge network mode" time="2025-01-04T11:32:01-05:00" level=debug msg="Created cgroup path user.slice/user-libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e.slice for parent user.slice and name libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e" time="2025-01-04T11:32:01-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e.slice" time="2025-01-04T11:32:01-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e.slice" time="2025-01-04T11:32:01-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-04T11:32:01-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-04T11:32:01-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." 
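The debug messages above show rootless Podman building a throwaway localhost/podman-pause:5.3.1-1733097600 image from scratch: it copies /usr/libexec/podman/catatonit into the image, sets ENTRYPOINT ["/catatonit", "-P"], and commits the result so the pod's infra container has an init process. A minimal sketch of how to observe this as the rootless user (the pod name is an assumption; everything else comes from this run):

    # first pod creation triggers the local pause-image build seen above
    podman pod create --name demo-pod
    # the committed image shows up in the user's containers storage
    podman images localhost/podman-pause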
time="2025-01-04T11:32:01-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@0cec0ba6b70ee34915ec9825ae81de8f9e0bc64dc0c543c9eb8094c2d1a78cd0\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-04T11:32:01-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@0cec0ba6b70ee34915ec9825ae81de8f9e0bc64dc0c543c9eb8094c2d1a78cd0)" time="2025-01-04T11:32:01-05:00" level=debug msg="exporting opaque data as blob \"sha256:0cec0ba6b70ee34915ec9825ae81de8f9e0bc64dc0c543c9eb8094c2d1a78cd0\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Inspecting image 0cec0ba6b70ee34915ec9825ae81de8f9e0bc64dc0c543c9eb8094c2d1a78cd0" time="2025-01-04T11:32:01-05:00" level=debug msg="exporting opaque data as blob \"sha256:0cec0ba6b70ee34915ec9825ae81de8f9e0bc64dc0c543c9eb8094c2d1a78cd0\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Inspecting image 0cec0ba6b70ee34915ec9825ae81de8f9e0bc64dc0c543c9eb8094c2d1a78cd0" time="2025-01-04T11:32:01-05:00" level=debug msg="Inspecting image 0cec0ba6b70ee34915ec9825ae81de8f9e0bc64dc0c543c9eb8094c2d1a78cd0" time="2025-01-04T11:32:01-05:00" level=debug msg="using systemd mode: false" time="2025-01-04T11:32:01-05:00" level=debug msg="setting container name 4066b38bb5e8-infra" time="2025-01-04T11:32:01-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network aac903796a463b10472049d22d2ff923ce5881d61c354442af57df0f9acd781d bridge podman1 2025-01-04 11:32:00.808293245 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2025-01-04T11:32:01-05:00" level=debug msg="Successfully loaded 2 networks" time="2025-01-04T11:32:01-05:00" level=debug msg="Allocated lock 1 for container 219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14" time="2025-01-04T11:32:01-05:00" level=debug msg="exporting opaque data as blob \"sha256:0cec0ba6b70ee34915ec9825ae81de8f9e0bc64dc0c543c9eb8094c2d1a78cd0\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Created container \"219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Container \"219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14/userdata\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Container \"219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14\" has run directory \"/run/user/3001/containers/overlay-containers/219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14/userdata\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-04T11:32:01-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-04T11:32:01-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-01-04T11:32:01-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-04T11:32:01-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-04T11:32:01-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2025-01-04T11:32:01-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-04T11:32:01-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-04T11:32:01-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-01-04T11:32:01-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-04T11:32:01-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-04T11:32:01-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-04T11:32:01-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-04T11:32:01-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-01-04T11:32:01-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-04T11:32:01-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-04T11:32:01-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-04T11:32:01-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:01-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-04T11:32:01-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-04T11:32:01-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-01-04T11:32:01-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-04T11:32:01-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-04T11:32:01-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-04T11:32:01-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:01-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-04T11:32:01-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-04T11:32:01-05:00" level=debug msg="using systemd mode: false" time="2025-01-04T11:32:01-05:00" level=debug msg="adding container to pod httpd1" time="2025-01-04T11:32:01-05:00" level=debug msg="setting container name httpd1-httpd1" 
time="2025-01-04T11:32:01-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-01-04T11:32:01-05:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2025-01-04T11:32:01-05:00" level=debug msg="Adding mount /proc" time="2025-01-04T11:32:01-05:00" level=debug msg="Adding mount /dev" time="2025-01-04T11:32:01-05:00" level=debug msg="Adding mount /dev/pts" time="2025-01-04T11:32:01-05:00" level=debug msg="Adding mount /dev/mqueue" time="2025-01-04T11:32:01-05:00" level=debug msg="Adding mount /sys" time="2025-01-04T11:32:01-05:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2025-01-04T11:32:01-05:00" level=debug msg="Allocated lock 2 for container a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079" time="2025-01-04T11:32:01-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Created container \"a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Container \"a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079/userdata\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Container \"a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079\" has run directory \"/run/user/3001/containers/overlay-containers/a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079/userdata\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Strongconnecting node 219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14" time="2025-01-04T11:32:01-05:00" level=debug msg="Pushed 219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14 onto stack" time="2025-01-04T11:32:01-05:00" level=debug msg="Finishing node 219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14. Popped 219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14 off stack" time="2025-01-04T11:32:01-05:00" level=debug msg="Strongconnecting node a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079" time="2025-01-04T11:32:01-05:00" level=debug msg="Pushed a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079 onto stack" time="2025-01-04T11:32:01-05:00" level=debug msg="Finishing node a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079. 
Popped a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079 off stack" time="2025-01-04T11:32:01-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/R7JMOHH4XTSV55MZKCTRNIJDFC,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/e5c22c8b395272c734ae1c2b04ba2a8f0f7d50cc693da01faa29cf467d4dc28f/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/e5c22c8b395272c734ae1c2b04ba2a8f0f7d50cc693da01faa29cf467d4dc28f/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c57,c559\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Made network namespace at /run/user/3001/netns/netns-b90250f7-7c41-4bfe-993a-24596fd823d6 for container 219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14" time="2025-01-04T11:32:01-05:00" level=debug msg="Mounted container \"219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/e5c22c8b395272c734ae1c2b04ba2a8f0f7d50cc693da01faa29cf467d4dc28f/merged\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Creating rootless network namespace at \"/run/user/3001/containers/networks/rootless-netns/rootless-netns\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Created root filesystem for container 219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14 at /home/podman_basic_user/.local/share/containers/storage/overlay/e5c22c8b395272c734ae1c2b04ba2a8f0f7d50cc693da01faa29cf467d4dc28f/merged" time="2025-01-04T11:32:01-05:00" level=debug msg="pasta arguments: --config-net --pid /run/user/3001/containers/networks/rootless-netns/rootless-netns-conn.pid --dns-forward 169.254.1.1 -t none -u none -T none -U none --no-map-gw --quiet --netns /run/user/3001/containers/networks/rootless-netns/rootless-netns --map-guest-addr 169.254.1.2" time="2025-01-04T11:32:01-05:00" level=debug msg="The path of /etc/resolv.conf in the mount ns is \"/etc/resolv.conf\"" [DEBUG netavark::network::validation] Validating network namespace... [DEBUG netavark::commands::setup] Setting up... 
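At this point the rootless network plumbing is being assembled: pasta, with the arguments logged above, gives the shared rootless network namespace outbound connectivity and the 169.254.1.1 DNS forward address, while netavark (next) builds the podman1 bridge and the container's eth0 inside that namespace. A small inspection sketch, assuming the rootless user and Podman 5.x defaults:

    # the kube-play network definition (bridge podman1, 10.89.0.0/24 in this run)
    podman network inspect podman-default-kube-network
    # join the shared rootless netns and list the interfaces pasta/netavark created
    podman unshare --rootless-netns ip addr show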
[INFO netavark::firewall] Using nftables firewall driver [DEBUG netavark::network::bridge] Setup network podman-default-kube-network [DEBUG netavark::network::bridge] Container interface name: eth0 with IP addresses [10.89.0.2/24] [DEBUG netavark::network::bridge] Bridge name: podman1 with IP addresses [10.89.0.1/24] [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.ip_forward to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/podman1/rp_filter to 2 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv6/conf/eth0/autoconf to 0 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/arp_notify to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/rp_filter to 2 [INFO netavark::network::netlink] Adding route (dest: 0.0.0.0/0 ,gw: 10.89.0.1, metric 100) [INFO netavark::firewall::nft] Creating container chain nv_aac90379_10_89_0_0_nm24 [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.conf.podman1.route_localnet to 1 [DEBUG netavark::dns::aardvark] Spawning aardvark server [DEBUG netavark::dns::aardvark] start aardvark-dns: ["systemd-run", "-q", "--scope", "--user", "/usr/libexec/podman/aardvark-dns", "--config", "/run/user/3001/containers/networks/aardvark-dns", "-p", "53", "run"] [DEBUG netavark::commands::setup] { "podman-default-kube-network": StatusBlock { dns_search_domains: Some( [ "dns.podman", ], ), dns_server_ips: Some( [ 10.89.0.1, ], ), interfaces: Some( { "eth0": NetInterface { mac_address: "c2:a4:da:a9:fd:62", subnets: Some( [ NetAddress { gateway: Some( 10.89.0.1, ), ipnet: 10.89.0.2/24, }, ], ), }, }, ), }, } [DEBUG netavark::commands::setup] Setup complete time="2025-01-04T11:32:01-05:00" level=debug msg="rootlessport: time=\"2025-01-04T11:32:01-05:00\" level=info msg=\"Starting parent driver\"\n" time="2025-01-04T11:32:01-05:00" level=debug msg="rootlessport: time=\"2025-01-04T11:32:01-05:00\" level=info msg=\"opaque=map[builtin.readypipepath:/run/user/3001/libpod/tmp/rootlessport56212955/.bp-ready.pipe builtin.socketpath:/run/user/3001/libpod/tmp/rootlessport56212955/.bp.sock]\"\n" time="2025-01-04T11:32:01-05:00" level=debug msg="rootlessport: time=\"2025-01-04T11:32:01-05:00\" level=info msg=\"Starting child driver in child netns (\\\"/proc/self/exe\\\" [rootlessport-child])\"\n" time="2025-01-04T11:32:01-05:00" level=debug msg="rootlessport: time=\"2025-01-04T11:32:01-05:00\" level=info msg=\"Waiting for initComplete\"\n" time="2025-01-04T11:32:01-05:00" level=debug msg="rootlessport: time=\"2025-01-04T11:32:01-05:00\" level=info msg=\"initComplete is closed; parent and child established the communication channel\"\n" time="2025-01-04T11:32:01-05:00" level=debug msg="rootlessport: time=\"2025-01-04T11:32:01-05:00\" level=info msg=\"Exposing ports [{ 80 15001 1 tcp}]\"\n" time="2025-01-04T11:32:01-05:00" level=debug msg="rootlessport: time=\"2025-01-04T11:32:01-05:00\" level=info msg=Ready\n" time="2025-01-04T11:32:01-05:00" level=debug msg="rootlessport is ready" time="2025-01-04T11:32:01-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2025-01-04T11:32:01-05:00" level=debug msg="Setting Cgroups for container 219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14 to 
user-libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e.slice:libpod:219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14" time="2025-01-04T11:32:01-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-01-04T11:32:01-05:00" level=debug msg="Workdir \"/\" resolved to host path \"/home/podman_basic_user/.local/share/containers/storage/overlay/e5c22c8b395272c734ae1c2b04ba2a8f0f7d50cc693da01faa29cf467d4dc28f/merged\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Created OCI spec for container 219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14 at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14/userdata/config.json" time="2025-01-04T11:32:01-05:00" level=debug msg="Created cgroup path user.slice/user-libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e.slice for parent user.slice and name libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e" time="2025-01-04T11:32:01-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e.slice" time="2025-01-04T11:32:01-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e.slice" time="2025-01-04T11:32:01-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-01-04T11:32:01-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c 219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14 -u 219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14 -r /usr/bin/crun -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14/userdata -p /run/user/3001/containers/overlay-containers/219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14/userdata/pidfile -n 4066b38bb5e8-infra --exit-dir /run/user/3001/libpod/tmp/exits --persist-dir /run/user/3001/libpod/tmp/persist/219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14 --full-attach -s -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file 
--exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg 219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14]" time="2025-01-04T11:32:01-05:00" level=info msg="Running conmon under slice user-libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e.slice and unitName libpod-conmon-219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14.scope" [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied time="2025-01-04T11:32:01-05:00" level=debug msg="Received: 24518" time="2025-01-04T11:32:01-05:00" level=info msg="Got Conmon PID as 24516" time="2025-01-04T11:32:01-05:00" level=debug msg="Created container 219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14 in OCI runtime" time="2025-01-04T11:32:01-05:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2025-01-04T11:32:01-05:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2025-01-04T11:32:01-05:00" level=debug msg="Starting container 219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14 with command [/catatonit -P]" time="2025-01-04T11:32:01-05:00" level=debug msg="Started container 219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14" time="2025-01-04T11:32:01-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/7GE4TEETLFYRITOUFT43LDAT5V,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/bb5bb6e04d077cff776259eea1e86734ad761d83e0bbb003ce41a2d225d9c3a4/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/bb5bb6e04d077cff776259eea1e86734ad761d83e0bbb003ce41a2d225d9c3a4/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c57,c559\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Mounted container \"a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/bb5bb6e04d077cff776259eea1e86734ad761d83e0bbb003ce41a2d225d9c3a4/merged\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Created root filesystem for container a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079 at /home/podman_basic_user/.local/share/containers/storage/overlay/bb5bb6e04d077cff776259eea1e86734ad761d83e0bbb003ce41a2d225d9c3a4/merged" time="2025-01-04T11:32:01-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2025-01-04T11:32:01-05:00" level=debug msg="Setting Cgroups for container a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079 to user-libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e.slice:libpod:a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079" time="2025-01-04T11:32:01-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-01-04T11:32:01-05:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount" time="2025-01-04T11:32:01-05:00" level=debug msg="Created OCI spec for container a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079 at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079/userdata/config.json" time="2025-01-04T11:32:01-05:00" level=debug msg="Created cgroup path 
user.slice/user-libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e.slice for parent user.slice and name libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e" time="2025-01-04T11:32:01-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e.slice" time="2025-01-04T11:32:01-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e.slice" time="2025-01-04T11:32:01-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-01-04T11:32:01-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079 -u a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079 -r /usr/bin/crun -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079/userdata -p /run/user/3001/containers/overlay-containers/a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079/userdata/pidfile -n httpd1-httpd1 --exit-dir /run/user/3001/libpod/tmp/exits --persist-dir /run/user/3001/libpod/tmp/persist/a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079 --full-attach -s -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079]" time="2025-01-04T11:32:01-05:00" level=info msg="Running conmon under slice user-libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e.slice and unitName libpod-conmon-a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079.scope" [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied time="2025-01-04T11:32:01-05:00" level=debug msg="Received: 24523" time="2025-01-04T11:32:01-05:00" level=info msg="Got Conmon PID as 24521" time="2025-01-04T11:32:01-05:00" level=debug msg="Created container a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079 in OCI runtime" time="2025-01-04T11:32:01-05:00" level=debug 
msg="Starting container a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079 with command [/bin/busybox-extras httpd -f -p 80]" time="2025-01-04T11:32:01-05:00" level=debug msg="Started container a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079" time="2025-01-04T11:32:01-05:00" level=debug msg="Called kube.PersistentPostRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2025-01-04T11:32:01-05:00" level=debug msg="Shutting down engines" time="2025-01-04T11:32:01-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=24414 Jan 04 11:32:01 managed-node2 python3.12[24407]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Jan 04 11:32:01 managed-node2 sudo[24404]: pam_unix(sudo:session): session closed for user podman_basic_user Jan 04 11:32:01 managed-node2 sudo[24697]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zjxdatdlmcvreiirvpracgetnfazsrtv ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736008321.735781-14482-279456464405913/AnsiballZ_systemd.py' Jan 04 11:32:01 managed-node2 sudo[24697]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-24697) opened. Jan 04 11:32:02 managed-node2 sudo[24697]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jan 04 11:32:02 managed-node2 python3.12[24700]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 04 11:32:02 managed-node2 systemd[23226]: Reload requested from client PID 24701 ('systemctl')... Jan 04 11:32:02 managed-node2 systemd[23226]: Reloading... Jan 04 11:32:02 managed-node2 systemd[23226]: Reloading finished in 45 ms. Jan 04 11:32:02 managed-node2 sudo[24697]: pam_unix(sudo:session): session closed for user podman_basic_user Jan 04 11:32:02 managed-node2 sudo[24884]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cbzuxxgmxfkujbixziuexqothcnmvbki ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736008322.485175-14525-121423282136574/AnsiballZ_systemd.py' Jan 04 11:32:02 managed-node2 sudo[24884]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-24884) opened. Jan 04 11:32:02 managed-node2 sudo[24884]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jan 04 11:32:02 managed-node2 python3.12[24887]: ansible-systemd Invoked with name=podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service scope=user enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Jan 04 11:32:02 managed-node2 systemd[23226]: Reload requested from client PID 24890 ('systemctl')... Jan 04 11:32:02 managed-node2 systemd[23226]: Reloading... Jan 04 11:32:03 managed-node2 systemd[23226]: Reloading finished in 43 ms. 
Jan 04 11:32:03 managed-node2 sudo[24884]: pam_unix(sudo:session): session closed for user podman_basic_user Jan 04 11:32:03 managed-node2 sudo[25073]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cokjvrtefywltxriyjuvpxsvgvsdtezw ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736008323.2257214-14564-59845127645483/AnsiballZ_systemd.py' Jan 04 11:32:03 managed-node2 sudo[25073]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-25073) opened. Jan 04 11:32:03 managed-node2 sudo[25073]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jan 04 11:32:03 managed-node2 python3.12[25076]: ansible-systemd Invoked with name=podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 04 11:32:03 managed-node2 systemd[23226]: Created slice app-podman\x2dkube.slice - Slice /app/podman-kube. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 83. Jan 04 11:32:03 managed-node2 systemd[23226]: Starting podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 71. Jan 04 11:32:03 managed-node2 aardvark-dns[24498]: Received SIGHUP Jan 04 11:32:03 managed-node2 aardvark-dns[24498]: Successfully parsed config Jan 04 11:32:03 managed-node2 aardvark-dns[24498]: Listen v4 ip {} Jan 04 11:32:03 managed-node2 aardvark-dns[24498]: Listen v6 ip {} Jan 04 11:32:03 managed-node2 aardvark-dns[24498]: No configuration found stopping the sever Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only 219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14)" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=info msg="Using sqlite as database backend" Jan 04 11:32:03 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 04 11:32:03 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Jan 04 11:32:03 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Jan 04 11:32:03 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 04 
11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="systemd-logind: Unknown object '/'." Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Using graph driver overlay" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Using run root /run/user/3001/containers" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Using transient store: false" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Cached value indicated that metacopy is not being used" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Cached value indicated that native-diff is usable" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Initializing event backend file" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" 
level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=info msg="Setting parallel job count to 7" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only 219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14)" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Shutting down engines" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=info msg="Received shutdown.Stop(), terminating!" 
PID=25091 Jan 04 11:32:13 managed-node2 podman[25079]: time="2025-01-04T11:32:13-05:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd1-httpd1 in 10 seconds, resorting to SIGKILL" Jan 04 11:32:13 managed-node2 conmon[24521]: conmon a6beb024b5d86fca9df6 : container 24523 exited with status 137 Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079)" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=info msg="Using sqlite as database backend" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="systemd-logind: Unknown object '/'." Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Using graph driver overlay" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Using run root /run/user/3001/containers" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Using transient store: false" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Cached value indicated that metacopy is not being used" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Cached value indicated that native-diff is usable" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="backingFs=xfs, 
projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Initializing event backend file" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=info msg="Setting parallel job count to 7" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079)" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Shutting down engines" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=info msg="Received shutdown.Stop(), terminating!" 
PID=25108 Jan 04 11:32:13 managed-node2 systemd[23226]: Stopped libpod-conmon-a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079.scope. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 85 and the job result is done. Jan 04 11:32:13 managed-node2 systemd[23226]: Removed slice user-libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e.slice - cgroup user-libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e.slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 84 and the job result is done. Jan 04 11:32:13 managed-node2 systemd[23226]: user-libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e.slice: Failed to open /run/user/3001/systemd/transient/user-libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e.slice: No such file or directory Jan 04 11:32:13 managed-node2 podman[25079]: Pods stopped: Jan 04 11:32:13 managed-node2 podman[25079]: 4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e Jan 04 11:32:13 managed-node2 podman[25079]: Pods removed: Jan 04 11:32:13 managed-node2 podman[25079]: 4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e Jan 04 11:32:13 managed-node2 podman[25079]: Secrets removed: Jan 04 11:32:13 managed-node2 podman[25079]: Volumes removed: Jan 04 11:32:13 managed-node2 systemd[23226]: Created slice user-libpod_pod_fe903ad2ec2383adc1a4b72155ae3452d6d7505e7c9633f4ca79e65d6dc06b07.slice - cgroup user-libpod_pod_fe903ad2ec2383adc1a4b72155ae3452d6d7505e7c9633f4ca79e65d6dc06b07.slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 86. Jan 04 11:32:14 managed-node2 systemd[23226]: Started libpod-bf4229278cbf719d6d75f11751a80a284dd3256cefd415e800ab5f5cdc883e59.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 90. Jan 04 11:32:14 managed-node2 systemd[23226]: Started rootless-netns-4d92c901.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 94. Jan 04 11:32:14 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 04 11:32:14 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 04 11:32:14 managed-node2 kernel: veth0: entered allmulticast mode Jan 04 11:32:14 managed-node2 kernel: veth0: entered promiscuous mode Jan 04 11:32:14 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 04 11:32:14 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Jan 04 11:32:14 managed-node2 systemd[23226]: Started run-rf8300fa8dbe44c88a447b2bb61cb38c6.scope - /usr/libexec/podman/aardvark-dns --config /run/user/3001/containers/networks/aardvark-dns -p 53 run. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 98. Jan 04 11:32:14 managed-node2 systemd[23226]: Started libpod-2d741e0e82710b800fa6f0694418630efa8b8cb2b0ee333b2427e20d818729bb.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 102. Jan 04 11:32:14 managed-node2 systemd[23226]: Started libpod-af7f2944597961c213714ce5393fe4d7a79783462defc3e71f2197e0c64d6129.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 107. Jan 04 11:32:14 managed-node2 podman[25079]: Pod: Jan 04 11:32:14 managed-node2 podman[25079]: fe903ad2ec2383adc1a4b72155ae3452d6d7505e7c9633f4ca79e65d6dc06b07 Jan 04 11:32:14 managed-node2 podman[25079]: Container: Jan 04 11:32:14 managed-node2 podman[25079]: af7f2944597961c213714ce5393fe4d7a79783462defc3e71f2197e0c64d6129 Jan 04 11:32:14 managed-node2 systemd[23226]: Started podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 71. Jan 04 11:32:14 managed-node2 sudo[25073]: pam_unix(sudo:session): session closed for user podman_basic_user Jan 04 11:32:15 managed-node2 python3.12[25298]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jan 04 11:32:15 managed-node2 python3.12[25430]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:32:17 managed-node2 python3.12[25563]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:32:18 managed-node2 python3.12[25695]: ansible-file Invoked with path=/tmp/lsr_83t3r8ir_podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:32:19 managed-node2 python3.12[25826]: ansible-file Invoked with path=/tmp/lsr_83t3r8ir_podman/httpd2-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:32:19 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 04 11:32:19 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 04 11:32:20 managed-node2 podman[25988]: 2025-01-04 11:32:20.623434175 -0500 EST m=+0.771239383 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 04 11:32:20 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 04 11:32:21 managed-node2 python3.12[26134]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:32:21 managed-node2 python3.12[26265]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:32:21 managed-node2 python3.12[26396]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 04 11:32:22 managed-node2 python3.12[26501]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd2.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736008341.6730418-15415-21223707228100/.source.yml _original_basename=._e6nowhb follow=False checksum=d85a9207eebdee136a70d7cab8884e5a6c38780a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:32:22 managed-node2 python3.12[26632]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 04 11:32:22 managed-node2 systemd[1]: Created slice machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice - cgroup machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice. 
░░ Subject: A start job for unit machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice has finished successfully. ░░ ░░ The job identifier is 2057. Jan 04 11:32:22 managed-node2 podman[26639]: 2025-01-04 11:32:22.896568746 -0500 EST m=+0.069621115 container create 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19 (image=localhost/podman-pause:5.3.1-1733097600, name=ce4a4f29304b-infra, pod_id=ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a, io.buildah.version=1.38.0) Jan 04 11:32:22 managed-node2 podman[26639]: 2025-01-04 11:32:22.903197319 -0500 EST m=+0.076249656 pod create ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a (image=, name=httpd2) Jan 04 11:32:22 managed-node2 podman[26639]: 2025-01-04 11:32:22.931209649 -0500 EST m=+0.104261986 container create b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a, io.containers.autoupdate=registry, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, app=test) Jan 04 11:32:22 managed-node2 podman[26639]: 2025-01-04 11:32:22.905669156 -0500 EST m=+0.078721561 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 04 11:32:22 managed-node2 NetworkManager[784]: [1736008342.9478] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/3) Jan 04 11:32:22 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 04 11:32:22 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 04 11:32:22 managed-node2 kernel: veth0: entered allmulticast mode Jan 04 11:32:22 managed-node2 kernel: veth0: entered promiscuous mode Jan 04 11:32:22 managed-node2 NetworkManager[784]: [1736008342.9582] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/4) Jan 04 11:32:22 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 04 11:32:22 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Jan 04 11:32:22 managed-node2 NetworkManager[784]: [1736008342.9616] device (veth0): carrier: link connected Jan 04 11:32:22 managed-node2 NetworkManager[784]: [1736008342.9620] device (podman1): carrier: link connected Jan 04 11:32:22 managed-node2 (udev-worker)[26654]: Network interface NamePolicy= disabled on kernel command line. Jan 04 11:32:22 managed-node2 (udev-worker)[26653]: Network interface NamePolicy= disabled on kernel command line. 
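(For reference: the ansible.legacy.copy and containers.podman.podman_play invocations logged above correspond roughly to the following task sketch. Parameter values are taken from the "Invoked with" lines; the task names and the local source filename are illustrative, not from the role itself.)

  - name: Deploy the httpd2 kube spec (illustrative reconstruction)
    ansible.builtin.copy:
      src: httpd2.yml          # hypothetical local name; the role actually copies from a generated temp file
      dest: /etc/containers/ansible-kubernetes.d/httpd2.yml
      owner: root
      group: "0"
      mode: "0644"

  - name: Start the httpd2 pod from the kube spec (illustrative reconstruction)
    containers.podman.podman_play:
      kube_file: /etc/containers/ansible-kubernetes.d/httpd2.yml
      state: started
      executable: podman
      debug: true
      log_level: debug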
Jan 04 11:32:23 managed-node2 NetworkManager[784]: [1736008343.0105] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jan 04 11:32:23 managed-node2 NetworkManager[784]: [1736008343.0111] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jan 04 11:32:23 managed-node2 NetworkManager[784]: [1736008343.0164] device (podman1): Activation: starting connection 'podman1' (546e9f42-6299-437b-9679-89347f4cb263) Jan 04 11:32:23 managed-node2 NetworkManager[784]: [1736008343.0192] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jan 04 11:32:23 managed-node2 NetworkManager[784]: [1736008343.0195] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external') Jan 04 11:32:23 managed-node2 NetworkManager[784]: [1736008343.0196] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external') Jan 04 11:32:23 managed-node2 NetworkManager[784]: [1736008343.0200] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jan 04 11:32:23 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 2064. Jan 04 11:32:23 managed-node2 systemd[1]: Started run-r619f906cfc804e95b41ee657f03b0cc1.scope - /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit run-r619f906cfc804e95b41ee657f03b0cc1.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-r619f906cfc804e95b41ee657f03b0cc1.scope has finished successfully. ░░ ░░ The job identifier is 2143. Jan 04 11:32:23 managed-node2 aardvark-dns[26678]: starting aardvark on a child with pid 26683 Jan 04 11:32:23 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 2064. Jan 04 11:32:23 managed-node2 aardvark-dns[26683]: Successfully parsed config Jan 04 11:32:23 managed-node2 aardvark-dns[26683]: Listen v4 ip {"podman-default-kube-network": [10.89.0.1]} Jan 04 11:32:23 managed-node2 aardvark-dns[26683]: Listen v6 ip {} Jan 04 11:32:23 managed-node2 aardvark-dns[26683]: Using the following upstream servers: [10.29.169.13:53, 10.29.170.12:53, 10.2.32.1:53] Jan 04 11:32:23 managed-node2 NetworkManager[784]: [1736008343.0656] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jan 04 11:32:23 managed-node2 NetworkManager[784]: [1736008343.0665] device (podman1): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jan 04 11:32:23 managed-node2 NetworkManager[784]: [1736008343.0669] device (podman1): Activation: successful, device activated. 
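(At this point netavark has created the podman1 bridge and aardvark-dns is answering on 10.89.0.1 for podman-default-kube-network. A hypothetical verification task, not part of this test, could confirm that state from Ansible:)

  - name: Gather info about the kube network (hypothetical check, not in the test)
    containers.podman.podman_network_info:
      name: podman-default-kube-network
    register: __kube_network_info   # variable name is illustrative

  - name: Show what podman reports for the network (hypothetical)
    ansible.builtin.debug:
      var: __kube_network_info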
Jan 04 11:32:23 managed-node2 systemd[1]: Started libpod-conmon-72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19.scope. ░░ Subject: A start job for unit libpod-conmon-72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19.scope has finished successfully. ░░ ░░ The job identifier is 2149. Jan 04 11:32:23 managed-node2 conmon[26694]: conmon 72a074f2c404d4bcdd6f : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/13/attach} Jan 04 11:32:23 managed-node2 conmon[26694]: conmon 72a074f2c404d4bcdd6f : terminal_ctrl_fd: 13 Jan 04 11:32:23 managed-node2 conmon[26694]: conmon 72a074f2c404d4bcdd6f : winsz read side: 17, winsz write side: 18 Jan 04 11:32:23 managed-node2 systemd[1]: Started libpod-72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19.scope - libcrun container. ░░ Subject: A start job for unit libpod-72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19.scope has finished successfully. ░░ ░░ The job identifier is 2156. Jan 04 11:32:23 managed-node2 conmon[26694]: conmon 72a074f2c404d4bcdd6f : container PID: 26696 Jan 04 11:32:23 managed-node2 podman[26639]: 2025-01-04 11:32:23.140855181 -0500 EST m=+0.313907674 container init 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19 (image=localhost/podman-pause:5.3.1-1733097600, name=ce4a4f29304b-infra, pod_id=ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a, io.buildah.version=1.38.0) Jan 04 11:32:23 managed-node2 podman[26639]: 2025-01-04 11:32:23.143340029 -0500 EST m=+0.316392511 container start 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19 (image=localhost/podman-pause:5.3.1-1733097600, name=ce4a4f29304b-infra, pod_id=ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a, io.buildah.version=1.38.0) Jan 04 11:32:23 managed-node2 systemd[1]: Started libpod-conmon-b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026.scope. ░░ Subject: A start job for unit libpod-conmon-b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026.scope has finished successfully. ░░ ░░ The job identifier is 2163. Jan 04 11:32:23 managed-node2 conmon[26700]: conmon b51a9b2b65af4d9c04e3 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/12/attach} Jan 04 11:32:23 managed-node2 conmon[26700]: conmon b51a9b2b65af4d9c04e3 : terminal_ctrl_fd: 12 Jan 04 11:32:23 managed-node2 conmon[26700]: conmon b51a9b2b65af4d9c04e3 : winsz read side: 16, winsz write side: 17 Jan 04 11:32:23 managed-node2 systemd[1]: Started libpod-b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026.scope - libcrun container. ░░ Subject: A start job for unit libpod-b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026.scope has finished successfully. 
░░ ░░ The job identifier is 2170. Jan 04 11:32:23 managed-node2 conmon[26700]: conmon b51a9b2b65af4d9c04e3 : container PID: 26702 Jan 04 11:32:23 managed-node2 podman[26639]: 2025-01-04 11:32:23.200713794 -0500 EST m=+0.373766192 container init b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test) Jan 04 11:32:23 managed-node2 podman[26639]: 2025-01-04 11:32:23.203240701 -0500 EST m=+0.376293260 container start b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jan 04 11:32:23 managed-node2 podman[26639]: 2025-01-04 11:32:23.209148556 -0500 EST m=+0.382200935 pod start ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a (image=, name=httpd2) Jan 04 11:32:23 managed-node2 python3.12[26632]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml Jan 04 11:32:23 managed-node2 python3.12[26632]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod: ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a Container: b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026 Jan 04 11:32:23 managed-node2 python3.12[26632]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2025-01-04T11:32:22-05:00" level=info msg="/usr/bin/podman filtering at log level debug" time="2025-01-04T11:32:22-05:00" level=debug msg="Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2025-01-04T11:32:22-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2025-01-04T11:32:22-05:00" level=info msg="Using sqlite as database backend" time="2025-01-04T11:32:22-05:00" level=debug msg="Using graph driver overlay" time="2025-01-04T11:32:22-05:00" level=debug msg="Using graph root /var/lib/containers/storage" time="2025-01-04T11:32:22-05:00" level=debug msg="Using run root /run/containers/storage" time="2025-01-04T11:32:22-05:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" time="2025-01-04T11:32:22-05:00" level=debug msg="Using tmp dir /run/libpod" time="2025-01-04T11:32:22-05:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" time="2025-01-04T11:32:22-05:00" level=debug msg="Using transient store: false" time="2025-01-04T11:32:22-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-01-04T11:32:22-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-01-04T11:32:22-05:00" level=debug msg="Cached value indicated that metacopy is being used" time="2025-01-04T11:32:22-05:00" level=debug msg="Cached value indicated that native-diff is not being used" time="2025-01-04T11:32:22-05:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for 
building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" time="2025-01-04T11:32:22-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" time="2025-01-04T11:32:22-05:00" level=debug msg="Initializing event backend journald" time="2025-01-04T11:32:22-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" time="2025-01-04T11:32:22-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2025-01-04T11:32:22-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2025-01-04T11:32:22-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2025-01-04T11:32:22-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2025-01-04T11:32:22-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2025-01-04T11:32:22-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" time="2025-01-04T11:32:22-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2025-01-04T11:32:22-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2025-01-04T11:32:22-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" time="2025-01-04T11:32:22-05:00" level=info msg="Setting parallel job count to 7" time="2025-01-04T11:32:22-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network abeb9dad79b870d27d9a2e0372462704dea67bd90414ac62e777bb7e3f0f98fc bridge podman1 2025-01-04 11:30:09.084162695 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2025-01-04T11:32:22-05:00" level=debug msg="Successfully loaded 2 networks" time="2025-01-04T11:32:22-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-04T11:32:22-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-04T11:32:22-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." 
time="2025-01-04T11:32:22-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@c95b7a4fdf055ab0d873b648e6a8df27e25d358f3b00d002652aee04d59f7994\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-04T11:32:22-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@c95b7a4fdf055ab0d873b648e6a8df27e25d358f3b00d002652aee04d59f7994)" time="2025-01-04T11:32:22-05:00" level=debug msg="exporting opaque data as blob \"sha256:c95b7a4fdf055ab0d873b648e6a8df27e25d358f3b00d002652aee04d59f7994\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Pod using bridge network mode" time="2025-01-04T11:32:22-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice for parent machine.slice and name libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a" time="2025-01-04T11:32:22-05:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice" time="2025-01-04T11:32:22-05:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice" time="2025-01-04T11:32:22-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-04T11:32:22-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-04T11:32:22-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." 
time="2025-01-04T11:32:22-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@c95b7a4fdf055ab0d873b648e6a8df27e25d358f3b00d002652aee04d59f7994\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-04T11:32:22-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@c95b7a4fdf055ab0d873b648e6a8df27e25d358f3b00d002652aee04d59f7994)" time="2025-01-04T11:32:22-05:00" level=debug msg="exporting opaque data as blob \"sha256:c95b7a4fdf055ab0d873b648e6a8df27e25d358f3b00d002652aee04d59f7994\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Inspecting image c95b7a4fdf055ab0d873b648e6a8df27e25d358f3b00d002652aee04d59f7994" time="2025-01-04T11:32:22-05:00" level=debug msg="exporting opaque data as blob \"sha256:c95b7a4fdf055ab0d873b648e6a8df27e25d358f3b00d002652aee04d59f7994\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Inspecting image c95b7a4fdf055ab0d873b648e6a8df27e25d358f3b00d002652aee04d59f7994" time="2025-01-04T11:32:22-05:00" level=debug msg="Inspecting image c95b7a4fdf055ab0d873b648e6a8df27e25d358f3b00d002652aee04d59f7994" time="2025-01-04T11:32:22-05:00" level=debug msg="using systemd mode: false" time="2025-01-04T11:32:22-05:00" level=debug msg="setting container name ce4a4f29304b-infra" time="2025-01-04T11:32:22-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Allocated lock 1 for container 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19" time="2025-01-04T11:32:22-05:00" level=debug msg="exporting opaque data as blob \"sha256:c95b7a4fdf055ab0d873b648e6a8df27e25d358f3b00d002652aee04d59f7994\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are supported" time="2025-01-04T11:32:22-05:00" level=debug msg="Created container \"72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Container \"72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19\" has work directory \"/var/lib/containers/storage/overlay-containers/72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19/userdata\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Container \"72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19\" has run directory \"/run/containers/storage/overlay-containers/72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19/userdata\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-04T11:32:22-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-04T11:32:22-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-01-04T11:32:22-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-04T11:32:22-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-04T11:32:22-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2025-01-04T11:32:22-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-04T11:32:22-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-04T11:32:22-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-01-04T11:32:22-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-04T11:32:22-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-04T11:32:22-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-04T11:32:22-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-04T11:32:22-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-01-04T11:32:22-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-04T11:32:22-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-04T11:32:22-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-04T11:32:22-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:22-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-04T11:32:22-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-04T11:32:22-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-01-04T11:32:22-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-04T11:32:22-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-04T11:32:22-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-04T11:32:22-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:22-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-04T11:32:22-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-04T11:32:22-05:00" level=debug msg="using systemd mode: false" time="2025-01-04T11:32:22-05:00" level=debug msg="adding container to pod httpd2" time="2025-01-04T11:32:22-05:00" level=debug msg="setting container name httpd2-httpd2" 
time="2025-01-04T11:32:22-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-01-04T11:32:22-05:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2025-01-04T11:32:22-05:00" level=debug msg="Adding mount /proc" time="2025-01-04T11:32:22-05:00" level=debug msg="Adding mount /dev" time="2025-01-04T11:32:22-05:00" level=debug msg="Adding mount /dev/pts" time="2025-01-04T11:32:22-05:00" level=debug msg="Adding mount /dev/mqueue" time="2025-01-04T11:32:22-05:00" level=debug msg="Adding mount /sys" time="2025-01-04T11:32:22-05:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2025-01-04T11:32:22-05:00" level=debug msg="Allocated lock 2 for container b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026" time="2025-01-04T11:32:22-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Created container \"b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Container \"b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026\" has work directory \"/var/lib/containers/storage/overlay-containers/b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026/userdata\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Container \"b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026\" has run directory \"/run/containers/storage/overlay-containers/b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026/userdata\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Strongconnecting node 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19" time="2025-01-04T11:32:22-05:00" level=debug msg="Pushed 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19 onto stack" time="2025-01-04T11:32:22-05:00" level=debug msg="Finishing node 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19. Popped 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19 off stack" time="2025-01-04T11:32:22-05:00" level=debug msg="Strongconnecting node b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026" time="2025-01-04T11:32:22-05:00" level=debug msg="Pushed b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026 onto stack" time="2025-01-04T11:32:22-05:00" level=debug msg="Finishing node b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026. 
Popped b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026 off stack" time="2025-01-04T11:32:22-05:00" level=debug msg="overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/TQOTJL77U4SKGQOWPJFZ5KETQN,upperdir=/var/lib/containers/storage/overlay/04acd098cd25fb66ec04f609cc7975a552b6156d7496ae563fab2f0431094053/diff,workdir=/var/lib/containers/storage/overlay/04acd098cd25fb66ec04f609cc7975a552b6156d7496ae563fab2f0431094053/work,nodev,metacopy=on,context=\"system_u:object_r:container_file_t:s0:c285,c966\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Mounted container \"72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19\" at \"/var/lib/containers/storage/overlay/04acd098cd25fb66ec04f609cc7975a552b6156d7496ae563fab2f0431094053/merged\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Created root filesystem for container 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19 at /var/lib/containers/storage/overlay/04acd098cd25fb66ec04f609cc7975a552b6156d7496ae563fab2f0431094053/merged" time="2025-01-04T11:32:22-05:00" level=debug msg="Made network namespace at /run/netns/netns-a4cf92d5-1e89-ffca-31b5-4f80cdedf748 for container 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19" [DEBUG netavark::network::validation] Validating network namespace... [DEBUG netavark::commands::setup] Setting up... [INFO netavark::firewall] Using nftables firewall driver [DEBUG netavark::network::bridge] Setup network podman-default-kube-network [DEBUG netavark::network::bridge] Container interface name: eth0 with IP addresses [10.89.0.2/24] [DEBUG netavark::network::bridge] Bridge name: podman1 with IP addresses [10.89.0.1/24] [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.ip_forward to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/podman1/rp_filter to 2 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv6/conf/eth0/autoconf to 0 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/arp_notify to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/rp_filter to 2 [INFO netavark::network::netlink] Adding route (dest: 0.0.0.0/0 ,gw: 10.89.0.1, metric 100) [DEBUG netavark::firewall::firewalld] Adding firewalld rules for network 10.89.0.0/24 [DEBUG netavark::firewall::firewalld] Adding subnet 10.89.0.0/24 to zone trusted as source [INFO netavark::firewall::nft] Creating container chain nv_abeb9dad_10_89_0_0_nm24 [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.conf.podman1.route_localnet to 1 [DEBUG netavark::dns::aardvark] Spawning aardvark server [DEBUG netavark::dns::aardvark] start aardvark-dns: ["systemd-run", "-q", "--scope", "/usr/libexec/podman/aardvark-dns", "--config", "/run/containers/networks/aardvark-dns", "-p", "53", "run"] [DEBUG netavark::commands::setup] { "podman-default-kube-network": StatusBlock { dns_search_domains: Some( [ "dns.podman", ], ), dns_server_ips: Some( [ 10.89.0.1, ], ), interfaces: Some( { "eth0": NetInterface { mac_address: "6a:fb:ed:cc:d3:eb", subnets: Some( [ NetAddress { gateway: Some( 10.89.0.1, ), ipnet: 10.89.0.2/24, }, ], ), }, }, ), }, } [DEBUG netavark::commands::setup] Setup complete time="2025-01-04T11:32:23-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2025-01-04T11:32:23-05:00" level=debug msg="Setting Cgroups for container 
72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19 to machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice:libpod:72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19" time="2025-01-04T11:32:23-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-01-04T11:32:23-05:00" level=debug msg="Workdir \"/\" resolved to host path \"/var/lib/containers/storage/overlay/04acd098cd25fb66ec04f609cc7975a552b6156d7496ae563fab2f0431094053/merged\"" time="2025-01-04T11:32:23-05:00" level=debug msg="Created OCI spec for container 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19 at /var/lib/containers/storage/overlay-containers/72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19/userdata/config.json" time="2025-01-04T11:32:23-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice for parent machine.slice and name libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a" time="2025-01-04T11:32:23-05:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice" time="2025-01-04T11:32:23-05:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice" time="2025-01-04T11:32:23-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-01-04T11:32:23-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19 -u 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19/userdata -p /run/containers/storage/overlay-containers/72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19/userdata/pidfile -n ce4a4f29304b-infra --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19 --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup 
--exit-command-arg --stopped-only --exit-command-arg 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19]" time="2025-01-04T11:32:23-05:00" level=info msg="Running conmon under slice machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice and unitName libpod-conmon-72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19.scope" time="2025-01-04T11:32:23-05:00" level=debug msg="Received: 26696" time="2025-01-04T11:32:23-05:00" level=info msg="Got Conmon PID as 26694" time="2025-01-04T11:32:23-05:00" level=debug msg="Created container 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19 in OCI runtime" time="2025-01-04T11:32:23-05:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2025-01-04T11:32:23-05:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2025-01-04T11:32:23-05:00" level=debug msg="Starting container 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19 with command [/catatonit -P]" time="2025-01-04T11:32:23-05:00" level=debug msg="Started container 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19" time="2025-01-04T11:32:23-05:00" level=debug msg="overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/QUTKPFALPWKAMIPWEKEV6F457D,upperdir=/var/lib/containers/storage/overlay/284396ee3df2b9d4d6a5f1cba6ed473c8f134be852dee9a535fc7b15dcb509bf/diff,workdir=/var/lib/containers/storage/overlay/284396ee3df2b9d4d6a5f1cba6ed473c8f134be852dee9a535fc7b15dcb509bf/work,nodev,metacopy=on,context=\"system_u:object_r:container_file_t:s0:c285,c966\"" time="2025-01-04T11:32:23-05:00" level=debug msg="Mounted container \"b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026\" at \"/var/lib/containers/storage/overlay/284396ee3df2b9d4d6a5f1cba6ed473c8f134be852dee9a535fc7b15dcb509bf/merged\"" time="2025-01-04T11:32:23-05:00" level=debug msg="Created root filesystem for container b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026 at /var/lib/containers/storage/overlay/284396ee3df2b9d4d6a5f1cba6ed473c8f134be852dee9a535fc7b15dcb509bf/merged" time="2025-01-04T11:32:23-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2025-01-04T11:32:23-05:00" level=debug msg="Setting Cgroups for container b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026 to machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice:libpod:b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026" time="2025-01-04T11:32:23-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-01-04T11:32:23-05:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount" time="2025-01-04T11:32:23-05:00" level=debug msg="Created OCI spec for container b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026 at /var/lib/containers/storage/overlay-containers/b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026/userdata/config.json" time="2025-01-04T11:32:23-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice for parent machine.slice and name libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a" time="2025-01-04T11:32:23-05:00" level=debug msg="Created cgroup 
machine.slice/machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice" time="2025-01-04T11:32:23-05:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice" time="2025-01-04T11:32:23-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-01-04T11:32:23-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026 -u b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026/userdata -p /run/containers/storage/overlay-containers/b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026/userdata/pidfile -n httpd2-httpd2 --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026 --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026]" time="2025-01-04T11:32:23-05:00" level=info msg="Running conmon under slice machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice and unitName libpod-conmon-b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026.scope" time="2025-01-04T11:32:23-05:00" level=debug msg="Received: 26702" time="2025-01-04T11:32:23-05:00" level=info msg="Got Conmon PID as 26700" time="2025-01-04T11:32:23-05:00" level=debug msg="Created container b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026 in OCI runtime" time="2025-01-04T11:32:23-05:00" level=debug msg="Starting container b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026 with command [/bin/busybox-extras httpd -f -p 80]" time="2025-01-04T11:32:23-05:00" level=debug msg="Started container b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026" time="2025-01-04T11:32:23-05:00" level=debug msg="Called kube.PersistentPostRunE(/usr/bin/podman play kube --start=true --log-level=debug 
/etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2025-01-04T11:32:23-05:00" level=debug msg="Shutting down engines" time="2025-01-04T11:32:23-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=26639 Jan 04 11:32:23 managed-node2 python3.12[26632]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Jan 04 11:32:23 managed-node2 python3.12[26834]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 04 11:32:23 managed-node2 systemd[1]: Reload requested from client PID 26835 ('systemctl') (unit session-5.scope)... Jan 04 11:32:23 managed-node2 systemd[1]: Reloading... Jan 04 11:32:23 managed-node2 systemd[1]: Reloading finished in 204 ms. Jan 04 11:32:24 managed-node2 python3.12[27023]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Jan 04 11:32:24 managed-node2 systemd[1]: Reload requested from client PID 27026 ('systemctl') (unit session-5.scope)... Jan 04 11:32:24 managed-node2 systemd[1]: Reloading... Jan 04 11:32:24 managed-node2 systemd[1]: Reloading finished in 198 ms. Jan 04 11:32:25 managed-node2 python3.12[27213]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 04 11:32:25 managed-node2 systemd[1]: Created slice system-podman\x2dkube.slice - Slice /system/podman-kube. ░░ Subject: A start job for unit system-podman\x2dkube.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit system-podman\x2dkube.slice has finished successfully. ░░ ░░ The job identifier is 2255. Jan 04 11:32:25 managed-node2 systemd[1]: Starting podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun execution. ░░ ░░ The job identifier is 2177. Jan 04 11:32:25 managed-node2 podman[27217]: 2025-01-04 11:32:25.50935022 -0500 EST m=+0.024088006 pod stop ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a (image=, name=httpd2) Jan 04 11:32:25 managed-node2 systemd[1]: libpod-72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19.scope has successfully entered the 'dead' state. 
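(The three ansible-systemd invocations recorded above, daemon reload, enable, and start of the escaped template instance, amount to tasks along these lines. This is a condensed sketch: the role issues enable and start as separate calls, and the task names are illustrative.)

  - name: Reload the systemd manager configuration (sketch of the call above)
    ansible.builtin.systemd:
      daemon_reload: true
      scope: system

  - name: Enable and start the podman-kube unit for httpd2.yml (condensed sketch)
    ansible.builtin.systemd:
      name: 'podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service'
      scope: system
      enabled: true
      state: started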
Jan 04 11:32:25 managed-node2 podman[27217]: 2025-01-04 11:32:25.539790672 -0500 EST m=+0.054528283 container died 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19 (image=localhost/podman-pause:5.3.1-1733097600, name=ce4a4f29304b-infra, io.buildah.version=1.38.0) Jan 04 11:32:25 managed-node2 aardvark-dns[26683]: Received SIGHUP Jan 04 11:32:25 managed-node2 aardvark-dns[26683]: Successfully parsed config Jan 04 11:32:25 managed-node2 aardvark-dns[26683]: Listen v4 ip {} Jan 04 11:32:25 managed-node2 aardvark-dns[26683]: Listen v6 ip {} Jan 04 11:32:25 managed-node2 aardvark-dns[26683]: No configuration found stopping the sever Jan 04 11:32:25 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 04 11:32:25 managed-node2 systemd[1]: run-r619f906cfc804e95b41ee657f03b0cc1.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-r619f906cfc804e95b41ee657f03b0cc1.scope has successfully entered the 'dead' state. Jan 04 11:32:25 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Jan 04 11:32:25 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Jan 04 11:32:25 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19)" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=info msg="Using sqlite as database backend" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Using graph driver overlay" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Using graph root /var/lib/containers/storage" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Using run root /run/containers/storage" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Using tmp dir /run/libpod" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Using transient store: false" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jan 04 11:32:25 
managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Cached value indicated that metacopy is being used" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Cached value indicated that native-diff is not being used" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Initializing event backend journald" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=info msg="Setting parallel job count to 7" Jan 04 11:32:25 managed-node2 NetworkManager[784]: [1736008345.5771] device 
(podman1): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jan 04 11:32:25 managed-node2 systemd[1]: run-netns-netns\x2da4cf92d5\x2d1e89\x2dffca\x2d31b5\x2d4f80cdedf748.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2da4cf92d5\x2d1e89\x2dffca\x2d31b5\x2d4f80cdedf748.mount has successfully entered the 'dead' state. Jan 04 11:32:25 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19-userdata-shm.mount has successfully entered the 'dead' state. Jan 04 11:32:25 managed-node2 systemd[1]: var-lib-containers-storage-overlay-04acd098cd25fb66ec04f609cc7975a552b6156d7496ae563fab2f0431094053-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-04acd098cd25fb66ec04f609cc7975a552b6156d7496ae563fab2f0431094053-merged.mount has successfully entered the 'dead' state. Jan 04 11:32:25 managed-node2 podman[27217]: 2025-01-04 11:32:25.655948729 -0500 EST m=+0.170686278 container cleanup 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19 (image=localhost/podman-pause:5.3.1-1733097600, name=ce4a4f29304b-infra, pod_id=ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a, io.buildah.version=1.38.0) Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19)" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Shutting down engines" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=27229 Jan 04 11:32:25 managed-node2 systemd[1]: libpod-conmon-72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19.scope has successfully entered the 'dead' state. Jan 04 11:32:27 managed-node2 systemd[4487]: Created slice background.slice - User Background Tasks Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 14. Jan 04 11:32:27 managed-node2 systemd[4487]: Starting systemd-tmpfiles-clean.service - Cleanup of User's Temporary Files and Directories... 
░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 13. Jan 04 11:32:27 managed-node2 systemd[4487]: Finished systemd-tmpfiles-clean.service - Cleanup of User's Temporary Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. Jan 04 11:32:35 managed-node2 podman[27217]: time="2025-01-04T11:32:35-05:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd2-httpd2 in 10 seconds, resorting to SIGKILL" Jan 04 11:32:35 managed-node2 conmon[26700]: conmon b51a9b2b65af4d9c04e3 : container 26702 exited with status 137 Jan 04 11:32:35 managed-node2 systemd[1]: libpod-b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026.scope has successfully entered the 'dead' state. Jan 04 11:32:35 managed-node2 podman[27217]: 2025-01-04 11:32:35.538086285 -0500 EST m=+10.052824129 container died b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026)" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=info msg="Using sqlite as database backend" Jan 04 11:32:35 managed-node2 systemd[1]: var-lib-containers-storage-overlay-284396ee3df2b9d4d6a5f1cba6ed473c8f134be852dee9a535fc7b15dcb509bf-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-284396ee3df2b9d4d6a5f1cba6ed473c8f134be852dee9a535fc7b15dcb509bf-merged.mount has successfully entered the 'dead' state. 
Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Using graph driver overlay" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Using graph root /var/lib/containers/storage" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Using run root /run/containers/storage" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Using tmp dir /run/libpod" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Using transient store: false" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Cached value indicated that metacopy is being used" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Cached value indicated that native-diff is not being used" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Initializing event backend journald" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: 
time="2025-01-04T11:32:35-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=info msg="Setting parallel job count to 7" Jan 04 11:32:35 managed-node2 podman[27217]: 2025-01-04 11:32:35.576277326 -0500 EST m=+10.091014877 container cleanup b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=info msg="Received shutdown signal \"terminated\", terminating!" PID=27253 Jan 04 11:32:35 managed-node2 systemd[1]: Stopping libpod-conmon-b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026.scope... ░░ Subject: A stop job for unit libpod-conmon-b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026.scope has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026.scope has begun execution. ░░ ░░ The job identifier is 2263. Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=info msg="Invoking shutdown handler \"libpod\"" PID=27253 Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Completed shutdown handler \"libpod\", duration 0s" PID=27253 Jan 04 11:32:35 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 04 11:32:35 managed-node2 systemd[1]: libpod-conmon-b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026.scope has successfully entered the 'dead' state. Jan 04 11:32:35 managed-node2 systemd[1]: Stopped libpod-conmon-b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026.scope. 
░░ Subject: A stop job for unit libpod-conmon-b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026.scope has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026.scope has finished. ░░ ░░ The job identifier is 2263 and the job result is done. Jan 04 11:32:35 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Jan 04 11:32:35 managed-node2 systemd[1]: Removed slice machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice - cgroup machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice. ░░ Subject: A stop job for unit machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice has finished. ░░ ░░ The job identifier is 2262 and the job result is done. Jan 04 11:32:35 managed-node2 podman[27217]: 2025-01-04 11:32:35.6383497 -0500 EST m=+10.153087243 container remove b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jan 04 11:32:35 managed-node2 podman[27217]: 2025-01-04 11:32:35.665355307 -0500 EST m=+10.180092861 container remove 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19 (image=localhost/podman-pause:5.3.1-1733097600, name=ce4a4f29304b-infra, pod_id=ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a, io.buildah.version=1.38.0) Jan 04 11:32:35 managed-node2 systemd[1]: machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice: Failed to open /run/systemd/transient/machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice: No such file or directory Jan 04 11:32:35 managed-node2 podman[27217]: 2025-01-04 11:32:35.674688605 -0500 EST m=+10.189426110 pod remove ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a (image=, name=httpd2) Jan 04 11:32:35 managed-node2 podman[27217]: Pods stopped: Jan 04 11:32:35 managed-node2 podman[27217]: ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a Jan 04 11:32:35 managed-node2 podman[27217]: Pods removed: Jan 04 11:32:35 managed-node2 podman[27217]: ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a Jan 04 11:32:35 managed-node2 podman[27217]: Secrets removed: Jan 04 11:32:35 managed-node2 podman[27217]: Volumes removed: Jan 04 11:32:35 managed-node2 podman[27217]: 2025-01-04 11:32:35.699007794 -0500 EST m=+10.213745409 container create 23724cd2b98cf70cd15894b6500ab24bc5f44b0ed864a28ddbed6859c312ed76 (image=localhost/podman-pause:5.3.1-1733097600, name=c8c001b59877-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jan 04 11:32:35 managed-node2 systemd[1]: Created slice 
machine-libpod_pod_ecaac7b287a8d6f741bca1619f477c32162bd62f52c5b5e3212d4c371a417e5f.slice - cgroup machine-libpod_pod_ecaac7b287a8d6f741bca1619f477c32162bd62f52c5b5e3212d4c371a417e5f.slice. ░░ Subject: A start job for unit machine-libpod_pod_ecaac7b287a8d6f741bca1619f477c32162bd62f52c5b5e3212d4c371a417e5f.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_ecaac7b287a8d6f741bca1619f477c32162bd62f52c5b5e3212d4c371a417e5f.slice has finished successfully. ░░ ░░ The job identifier is 2264. Jan 04 11:32:35 managed-node2 podman[27217]: 2025-01-04 11:32:35.743858656 -0500 EST m=+10.258596283 container create dbc2b9607a4ca55a5a7069f06017b2c67ca7a59d57bf39bfedfb115ed16ec91f (image=localhost/podman-pause:5.3.1-1733097600, name=ecaac7b287a8-infra, pod_id=ecaac7b287a8d6f741bca1619f477c32162bd62f52c5b5e3212d4c371a417e5f, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jan 04 11:32:35 managed-node2 podman[27217]: 2025-01-04 11:32:35.749872146 -0500 EST m=+10.264609684 pod create ecaac7b287a8d6f741bca1619f477c32162bd62f52c5b5e3212d4c371a417e5f (image=, name=httpd2) Jan 04 11:32:35 managed-node2 podman[27217]: 2025-01-04 11:32:35.751842762 -0500 EST m=+10.266580444 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 04 11:32:35 managed-node2 podman[27217]: 2025-01-04 11:32:35.774790691 -0500 EST m=+10.289528239 container create f27f043c4df71017c1bd967698eecbb37685099825c3c94f42622aa2493af37f (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=ecaac7b287a8d6f741bca1619f477c32162bd62f52c5b5e3212d4c371a417e5f, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, app=test) Jan 04 11:32:35 managed-node2 podman[27217]: 2025-01-04 11:32:35.775085864 -0500 EST m=+10.289823419 container restart 23724cd2b98cf70cd15894b6500ab24bc5f44b0ed864a28ddbed6859c312ed76 (image=localhost/podman-pause:5.3.1-1733097600, name=c8c001b59877-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jan 04 11:32:35 managed-node2 systemd[1]: Started libpod-23724cd2b98cf70cd15894b6500ab24bc5f44b0ed864a28ddbed6859c312ed76.scope - libcrun container. ░░ Subject: A start job for unit libpod-23724cd2b98cf70cd15894b6500ab24bc5f44b0ed864a28ddbed6859c312ed76.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-23724cd2b98cf70cd15894b6500ab24bc5f44b0ed864a28ddbed6859c312ed76.scope has finished successfully. ░░ ░░ The job identifier is 2270. 
Jan 04 11:32:35 managed-node2 podman[27217]: 2025-01-04 11:32:35.845833248 -0500 EST m=+10.360570912 container init 23724cd2b98cf70cd15894b6500ab24bc5f44b0ed864a28ddbed6859c312ed76 (image=localhost/podman-pause:5.3.1-1733097600, name=c8c001b59877-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jan 04 11:32:35 managed-node2 podman[27217]: 2025-01-04 11:32:35.848320069 -0500 EST m=+10.363057650 container start 23724cd2b98cf70cd15894b6500ab24bc5f44b0ed864a28ddbed6859c312ed76 (image=localhost/podman-pause:5.3.1-1733097600, name=c8c001b59877-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jan 04 11:32:35 managed-node2 NetworkManager[784]: [1736008355.8609] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/5) Jan 04 11:32:35 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 04 11:32:35 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 04 11:32:35 managed-node2 kernel: veth0: entered allmulticast mode Jan 04 11:32:35 managed-node2 kernel: veth0: entered promiscuous mode Jan 04 11:32:35 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 04 11:32:35 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Jan 04 11:32:35 managed-node2 NetworkManager[784]: [1736008355.8728] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/6) Jan 04 11:32:35 managed-node2 NetworkManager[784]: [1736008355.8736] device (veth0): carrier: link connected Jan 04 11:32:35 managed-node2 NetworkManager[784]: [1736008355.8741] device (podman1): carrier: link connected Jan 04 11:32:35 managed-node2 (udev-worker)[27272]: Network interface NamePolicy= disabled on kernel command line. Jan 04 11:32:35 managed-node2 (udev-worker)[27273]: Network interface NamePolicy= disabled on kernel command line. Jan 04 11:32:35 managed-node2 NetworkManager[784]: [1736008355.9377] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jan 04 11:32:35 managed-node2 NetworkManager[784]: [1736008355.9395] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jan 04 11:32:35 managed-node2 NetworkManager[784]: [1736008355.9453] device (podman1): Activation: starting connection 'podman1' (502e50cd-fa9a-4804-aa44-3de28f6105b8) Jan 04 11:32:35 managed-node2 NetworkManager[784]: [1736008355.9455] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jan 04 11:32:35 managed-node2 NetworkManager[784]: [1736008355.9457] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external') Jan 04 11:32:35 managed-node2 NetworkManager[784]: [1736008355.9459] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external') Jan 04 11:32:35 managed-node2 NetworkManager[784]: [1736008355.9461] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jan 04 11:32:35 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 2277. 
Jan 04 11:32:35 managed-node2 systemd[1]: Started run-r472bc244f245414093ac26f7e0451412.scope - /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit run-r472bc244f245414093ac26f7e0451412.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-r472bc244f245414093ac26f7e0451412.scope has finished successfully. ░░ ░░ The job identifier is 2356. Jan 04 11:32:35 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 2277. Jan 04 11:32:35 managed-node2 NetworkManager[784]: [1736008355.9931] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jan 04 11:32:35 managed-node2 NetworkManager[784]: [1736008355.9935] device (podman1): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jan 04 11:32:35 managed-node2 NetworkManager[784]: [1736008355.9943] device (podman1): Activation: successful, device activated. Jan 04 11:32:36 managed-node2 systemd[1]: Started libpod-dbc2b9607a4ca55a5a7069f06017b2c67ca7a59d57bf39bfedfb115ed16ec91f.scope - libcrun container. ░░ Subject: A start job for unit libpod-dbc2b9607a4ca55a5a7069f06017b2c67ca7a59d57bf39bfedfb115ed16ec91f.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-dbc2b9607a4ca55a5a7069f06017b2c67ca7a59d57bf39bfedfb115ed16ec91f.scope has finished successfully. ░░ ░░ The job identifier is 2362. Jan 04 11:32:36 managed-node2 podman[27217]: 2025-01-04 11:32:36.026319901 -0500 EST m=+10.541057567 container init dbc2b9607a4ca55a5a7069f06017b2c67ca7a59d57bf39bfedfb115ed16ec91f (image=localhost/podman-pause:5.3.1-1733097600, name=ecaac7b287a8-infra, pod_id=ecaac7b287a8d6f741bca1619f477c32162bd62f52c5b5e3212d4c371a417e5f, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, io.buildah.version=1.38.0) Jan 04 11:32:36 managed-node2 podman[27217]: 2025-01-04 11:32:36.028993156 -0500 EST m=+10.543730773 container start dbc2b9607a4ca55a5a7069f06017b2c67ca7a59d57bf39bfedfb115ed16ec91f (image=localhost/podman-pause:5.3.1-1733097600, name=ecaac7b287a8-infra, pod_id=ecaac7b287a8d6f741bca1619f477c32162bd62f52c5b5e3212d4c371a417e5f, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, io.buildah.version=1.38.0) Jan 04 11:32:36 managed-node2 systemd[1]: Started libpod-f27f043c4df71017c1bd967698eecbb37685099825c3c94f42622aa2493af37f.scope - libcrun container. ░░ Subject: A start job for unit libpod-f27f043c4df71017c1bd967698eecbb37685099825c3c94f42622aa2493af37f.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-f27f043c4df71017c1bd967698eecbb37685099825c3c94f42622aa2493af37f.scope has finished successfully. ░░ ░░ The job identifier is 2369. 
Jan 04 11:32:36 managed-node2 podman[27217]: 2025-01-04 11:32:36.073620242 -0500 EST m=+10.588357802 container init f27f043c4df71017c1bd967698eecbb37685099825c3c94f42622aa2493af37f (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=ecaac7b287a8d6f741bca1619f477c32162bd62f52c5b5e3212d4c371a417e5f, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jan 04 11:32:36 managed-node2 podman[27217]: 2025-01-04 11:32:36.07582633 -0500 EST m=+10.590563926 container start f27f043c4df71017c1bd967698eecbb37685099825c3c94f42622aa2493af37f (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=ecaac7b287a8d6f741bca1619f477c32162bd62f52c5b5e3212d4c371a417e5f, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jan 04 11:32:36 managed-node2 podman[27217]: 2025-01-04 11:32:36.081743026 -0500 EST m=+10.596480566 pod start ecaac7b287a8d6f741bca1619f477c32162bd62f52c5b5e3212d4c371a417e5f (image=, name=httpd2) Jan 04 11:32:36 managed-node2 podman[27217]: Pod: Jan 04 11:32:36 managed-node2 podman[27217]: ecaac7b287a8d6f741bca1619f477c32162bd62f52c5b5e3212d4c371a417e5f Jan 04 11:32:36 managed-node2 podman[27217]: Container: Jan 04 11:32:36 managed-node2 podman[27217]: f27f043c4df71017c1bd967698eecbb37685099825c3c94f42622aa2493af37f Jan 04 11:32:36 managed-node2 systemd[1]: Started podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished successfully. ░░ ░░ The job identifier is 2177. 
Jan 04 11:32:36 managed-node2 python3.12[27453]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:32:38 managed-node2 python3.12[27586]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:32:39 managed-node2 python3.12[27718]: ansible-file Invoked with path=/tmp/lsr_83t3r8ir_podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:32:39 managed-node2 python3.12[27849]: ansible-file Invoked with path=/tmp/lsr_83t3r8ir_podman/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:32:41 managed-node2 podman[28009]: 2025-01-04 11:32:41.355986126 -0500 EST m=+1.236275522 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 04 11:32:41 managed-node2 python3.12[28154]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:32:42 managed-node2 python3.12[28285]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:32:42 managed-node2 python3.12[28416]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 04 11:32:42 managed-node2 python3.12[28521]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd3.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736008362.3190482-16268-253131673991454/.source.yml _original_basename=.pwzt0a85 follow=False checksum=afa449bfcb99ed71cd6828d236f90b0be6428b4e backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:32:43 managed-node2 python3.12[28652]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None 
debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 04 11:32:43 managed-node2 systemd[1]: Created slice machine-libpod_pod_5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3.slice - cgroup machine-libpod_pod_5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3.slice. ░░ Subject: A start job for unit machine-libpod_pod_5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3.slice has finished successfully. ░░ ░░ The job identifier is 2376. Jan 04 11:32:43 managed-node2 podman[28659]: 2025-01-04 11:32:43.448073385 -0500 EST m=+0.051064137 container create 862b588c7994589af90685f4f16b9f9a8c9ecfec263c923420d1cc1f285d8873 (image=localhost/podman-pause:5.3.1-1733097600, name=5c8b39a78908-infra, pod_id=5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3, io.buildah.version=1.38.0) Jan 04 11:32:43 managed-node2 podman[28659]: 2025-01-04 11:32:43.451231682 -0500 EST m=+0.054222484 pod create 5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3 (image=, name=httpd3) Jan 04 11:32:43 managed-node2 podman[28659]: 2025-01-04 11:32:43.47299563 -0500 EST m=+0.075986392 container create f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3, io.buildah.version=1.21.0, app=test, io.containers.autoupdate=registry, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jan 04 11:32:43 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Jan 04 11:32:43 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Jan 04 11:32:43 managed-node2 kernel: veth1: entered allmulticast mode Jan 04 11:32:43 managed-node2 kernel: veth1: entered promiscuous mode Jan 04 11:32:43 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Jan 04 11:32:43 managed-node2 kernel: podman1: port 2(veth1) entered forwarding state Jan 04 11:32:43 managed-node2 NetworkManager[784]: [1736008363.5000] manager: (veth1): new Veth device (/org/freedesktop/NetworkManager/Devices/7) Jan 04 11:32:43 managed-node2 NetworkManager[784]: [1736008363.5015] device (veth1): carrier: link connected Jan 04 11:32:43 managed-node2 podman[28659]: 2025-01-04 11:32:43.453461035 -0500 EST m=+0.056452036 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 04 11:32:43 managed-node2 (udev-worker)[28676]: Network interface NamePolicy= disabled on kernel command line. Jan 04 11:32:43 managed-node2 systemd[1]: Started libpod-conmon-862b588c7994589af90685f4f16b9f9a8c9ecfec263c923420d1cc1f285d8873.scope. ░░ Subject: A start job for unit libpod-conmon-862b588c7994589af90685f4f16b9f9a8c9ecfec263c923420d1cc1f285d8873.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-862b588c7994589af90685f4f16b9f9a8c9ecfec263c923420d1cc1f285d8873.scope has finished successfully. ░░ ░░ The job identifier is 2383. Jan 04 11:32:43 managed-node2 systemd[1]: Started libpod-862b588c7994589af90685f4f16b9f9a8c9ecfec263c923420d1cc1f285d8873.scope - libcrun container. 
░░ Subject: A start job for unit libpod-862b588c7994589af90685f4f16b9f9a8c9ecfec263c923420d1cc1f285d8873.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-862b588c7994589af90685f4f16b9f9a8c9ecfec263c923420d1cc1f285d8873.scope has finished successfully. ░░ ░░ The job identifier is 2390. Jan 04 11:32:43 managed-node2 podman[28659]: 2025-01-04 11:32:43.602871064 -0500 EST m=+0.205861976 container init 862b588c7994589af90685f4f16b9f9a8c9ecfec263c923420d1cc1f285d8873 (image=localhost/podman-pause:5.3.1-1733097600, name=5c8b39a78908-infra, pod_id=5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3, io.buildah.version=1.38.0) Jan 04 11:32:43 managed-node2 podman[28659]: 2025-01-04 11:32:43.605779897 -0500 EST m=+0.208770719 container start 862b588c7994589af90685f4f16b9f9a8c9ecfec263c923420d1cc1f285d8873 (image=localhost/podman-pause:5.3.1-1733097600, name=5c8b39a78908-infra, pod_id=5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3, io.buildah.version=1.38.0) Jan 04 11:32:43 managed-node2 systemd[1]: Started libpod-conmon-f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5.scope. ░░ Subject: A start job for unit libpod-conmon-f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5.scope has finished successfully. ░░ ░░ The job identifier is 2397. Jan 04 11:32:43 managed-node2 systemd[1]: Started libpod-f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5.scope - libcrun container. ░░ Subject: A start job for unit libpod-f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5.scope has finished successfully. ░░ ░░ The job identifier is 2404. 
Jan 04 11:32:43 managed-node2 podman[28659]: 2025-01-04 11:32:43.654110202 -0500 EST m=+0.257101117 container init f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z) Jan 04 11:32:43 managed-node2 podman[28659]: 2025-01-04 11:32:43.656617666 -0500 EST m=+0.259608549 container start f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test) Jan 04 11:32:43 managed-node2 podman[28659]: 2025-01-04 11:32:43.659914457 -0500 EST m=+0.262905222 pod start 5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3 (image=, name=httpd3) Jan 04 11:32:44 managed-node2 python3.12[28837]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 04 11:32:44 managed-node2 systemd[1]: Reload requested from client PID 28838 ('systemctl') (unit session-5.scope)... Jan 04 11:32:44 managed-node2 systemd[1]: Reloading... Jan 04 11:32:44 managed-node2 systemd[1]: Reloading finished in 217 ms. Jan 04 11:32:45 managed-node2 python3.12[29024]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Jan 04 11:32:45 managed-node2 systemd[1]: Reload requested from client PID 29027 ('systemctl') (unit session-5.scope)... Jan 04 11:32:45 managed-node2 systemd[1]: Reloading... Jan 04 11:32:45 managed-node2 systemd[1]: Reloading finished in 214 ms. Jan 04 11:32:46 managed-node2 python3.12[29214]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 04 11:32:46 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Jan 04 11:32:46 managed-node2 systemd[1]: Starting podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun execution. ░░ ░░ The job identifier is 2411. 
Jan 04 11:32:46 managed-node2 podman[29219]: 2025-01-04 11:32:46.077838962 -0500 EST m=+0.023800799 pod stop 5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3 (image=, name=httpd3) Jan 04 11:32:46 managed-node2 podman[29219]: 2025-01-04 11:32:46.089786817 -0500 EST m=+0.035748685 container stop 862b588c7994589af90685f4f16b9f9a8c9ecfec263c923420d1cc1f285d8873 (image=localhost/podman-pause:5.3.1-1733097600, name=5c8b39a78908-infra, pod_id=5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3, io.buildah.version=1.38.0) Jan 04 11:32:46 managed-node2 systemd[1]: libpod-862b588c7994589af90685f4f16b9f9a8c9ecfec263c923420d1cc1f285d8873.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-862b588c7994589af90685f4f16b9f9a8c9ecfec263c923420d1cc1f285d8873.scope has successfully entered the 'dead' state. Jan 04 11:32:46 managed-node2 podman[29219]: 2025-01-04 11:32:46.096689652 -0500 EST m=+0.042651405 container died 862b588c7994589af90685f4f16b9f9a8c9ecfec263c923420d1cc1f285d8873 (image=localhost/podman-pause:5.3.1-1733097600, name=5c8b39a78908-infra, io.buildah.version=1.38.0) Jan 04 11:32:46 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Jan 04 11:32:46 managed-node2 kernel: veth1 (unregistering): left allmulticast mode Jan 04 11:32:46 managed-node2 kernel: veth1 (unregistering): left promiscuous mode Jan 04 11:32:46 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Jan 04 11:32:46 managed-node2 systemd[1]: run-netns-netns\x2d76f641b4\x2d14c6\x2d7f71\x2debb7\x2d7b4c05b4a12e.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2d76f641b4\x2d14c6\x2d7f71\x2debb7\x2d7b4c05b4a12e.mount has successfully entered the 'dead' state. Jan 04 11:32:46 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-862b588c7994589af90685f4f16b9f9a8c9ecfec263c923420d1cc1f285d8873-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-862b588c7994589af90685f4f16b9f9a8c9ecfec263c923420d1cc1f285d8873-userdata-shm.mount has successfully entered the 'dead' state. Jan 04 11:32:46 managed-node2 systemd[1]: var-lib-containers-storage-overlay-640391f965bcd034ea649da3273b1a8b45e0901c17f5d62b4a666838f5a073d8-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-640391f965bcd034ea649da3273b1a8b45e0901c17f5d62b4a666838f5a073d8-merged.mount has successfully entered the 'dead' state. Jan 04 11:32:46 managed-node2 podman[29219]: 2025-01-04 11:32:46.159251578 -0500 EST m=+0.105213282 container cleanup 862b588c7994589af90685f4f16b9f9a8c9ecfec263c923420d1cc1f285d8873 (image=localhost/podman-pause:5.3.1-1733097600, name=5c8b39a78908-infra, pod_id=5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3, io.buildah.version=1.38.0) Jan 04 11:32:46 managed-node2 systemd[1]: libpod-conmon-862b588c7994589af90685f4f16b9f9a8c9ecfec263c923420d1cc1f285d8873.scope: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-862b588c7994589af90685f4f16b9f9a8c9ecfec263c923420d1cc1f285d8873.scope has successfully entered the 'dead' state. Jan 04 11:32:56 managed-node2 podman[29219]: time="2025-01-04T11:32:56-05:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd3-httpd3 in 10 seconds, resorting to SIGKILL" Jan 04 11:32:56 managed-node2 systemd[1]: libpod-f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5.scope has successfully entered the 'dead' state. Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.10190978 -0500 EST m=+10.047871602 container died f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jan 04 11:32:56 managed-node2 systemd[1]: var-lib-containers-storage-overlay-1a58fa6cb4e9ad7e43e7ef37f8f31a0f90a4fd6ed89e272e0786932b8faf7aee-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-1a58fa6cb4e9ad7e43e7ef37f8f31a0f90a4fd6ed89e272e0786932b8faf7aee-merged.mount has successfully entered the 'dead' state. Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.127890519 -0500 EST m=+10.073852732 container cleanup f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jan 04 11:32:56 managed-node2 systemd[1]: Stopping libpod-conmon-f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5.scope... ░░ Subject: A stop job for unit libpod-conmon-f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5.scope has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5.scope has begun execution. ░░ ░░ The job identifier is 2497. Jan 04 11:32:56 managed-node2 systemd[1]: libpod-conmon-f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5.scope has successfully entered the 'dead' state. Jan 04 11:32:56 managed-node2 systemd[1]: Stopped libpod-conmon-f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5.scope. ░░ Subject: A stop job for unit libpod-conmon-f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5.scope has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5.scope has finished. ░░ ░░ The job identifier is 2497 and the job result is done. 
Jan 04 11:32:56 managed-node2 systemd[1]: Removed slice machine-libpod_pod_5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3.slice - cgroup machine-libpod_pod_5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3.slice. ░░ Subject: A stop job for unit machine-libpod_pod_5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3.slice has finished. ░░ ░░ The job identifier is 2496 and the job result is done. Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.13903142 -0500 EST m=+10.084993237 pod stop 5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3 (image=, name=httpd3) Jan 04 11:32:56 managed-node2 systemd[1]: machine-libpod_pod_5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3.slice: Failed to open /run/systemd/transient/machine-libpod_pod_5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3.slice: No such file or directory Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.143655673 -0500 EST m=+10.089617517 pod stop 5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3 (image=, name=httpd3) Jan 04 11:32:56 managed-node2 systemd[1]: machine-libpod_pod_5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3.slice: Failed to open /run/systemd/transient/machine-libpod_pod_5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3.slice: No such file or directory Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.162026263 -0500 EST m=+10.107988003 container remove f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.178862859 -0500 EST m=+10.124824592 container remove 862b588c7994589af90685f4f16b9f9a8c9ecfec263c923420d1cc1f285d8873 (image=localhost/podman-pause:5.3.1-1733097600, name=5c8b39a78908-infra, pod_id=5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3, io.buildah.version=1.38.0) Jan 04 11:32:56 managed-node2 systemd[1]: machine-libpod_pod_5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3.slice: Failed to open /run/systemd/transient/machine-libpod_pod_5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3.slice: No such file or directory Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.185623785 -0500 EST m=+10.131585491 pod remove 5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3 (image=, name=httpd3) Jan 04 11:32:56 managed-node2 podman[29219]: Pods stopped: Jan 04 11:32:56 managed-node2 podman[29219]: 5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3 Jan 04 11:32:56 managed-node2 podman[29219]: Pods removed: Jan 04 11:32:56 managed-node2 podman[29219]: 5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3 Jan 04 11:32:56 managed-node2 podman[29219]: Secrets removed: Jan 04 11:32:56 managed-node2 podman[29219]: Volumes removed: Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.208822895 -0500 EST m=+10.154784618 container create 
ea65b4c142a1ecc4c5acde13aead0deeca5cc4c6b5b06f3925f8cc41f46c26ab (image=localhost/podman-pause:5.3.1-1733097600, name=8c5a350fe43d-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jan 04 11:32:56 managed-node2 systemd[1]: Created slice machine-libpod_pod_a93b7ede24ca41ba847ce3cf58cc729dffc746701987f6d92abdaf7b8833d80d.slice - cgroup machine-libpod_pod_a93b7ede24ca41ba847ce3cf58cc729dffc746701987f6d92abdaf7b8833d80d.slice. ░░ Subject: A start job for unit machine-libpod_pod_a93b7ede24ca41ba847ce3cf58cc729dffc746701987f6d92abdaf7b8833d80d.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_a93b7ede24ca41ba847ce3cf58cc729dffc746701987f6d92abdaf7b8833d80d.slice has finished successfully. ░░ ░░ The job identifier is 2498. Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.237772636 -0500 EST m=+10.183734449 container create 196161f20f831fa75ecc72ab533fa66e7dcf1074574fe2c3143d784148c1543a (image=localhost/podman-pause:5.3.1-1733097600, name=a93b7ede24ca-infra, pod_id=a93b7ede24ca41ba847ce3cf58cc729dffc746701987f6d92abdaf7b8833d80d, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, io.buildah.version=1.38.0) Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.241831508 -0500 EST m=+10.187793210 pod create a93b7ede24ca41ba847ce3cf58cc729dffc746701987f6d92abdaf7b8833d80d (image=, name=httpd3) Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.262804828 -0500 EST m=+10.208766641 container create cba46159e385d5b6de190f637934a5720bac746a6ef2ab1573d80109f1bbe7f1 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=a93b7ede24ca41ba847ce3cf58cc729dffc746701987f6d92abdaf7b8833d80d, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, app=test, io.containers.autoupdate=registry) Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.263133474 -0500 EST m=+10.209095217 container restart ea65b4c142a1ecc4c5acde13aead0deeca5cc4c6b5b06f3925f8cc41f46c26ab (image=localhost/podman-pause:5.3.1-1733097600, name=8c5a350fe43d-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jan 04 11:32:56 managed-node2 systemd[1]: Started libpod-ea65b4c142a1ecc4c5acde13aead0deeca5cc4c6b5b06f3925f8cc41f46c26ab.scope - libcrun container. ░░ Subject: A start job for unit libpod-ea65b4c142a1ecc4c5acde13aead0deeca5cc4c6b5b06f3925f8cc41f46c26ab.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-ea65b4c142a1ecc4c5acde13aead0deeca5cc4c6b5b06f3925f8cc41f46c26ab.scope has finished successfully. ░░ ░░ The job identifier is 2504. 
Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.306577989 -0500 EST m=+10.252539792 container init ea65b4c142a1ecc4c5acde13aead0deeca5cc4c6b5b06f3925f8cc41f46c26ab (image=localhost/podman-pause:5.3.1-1733097600, name=8c5a350fe43d-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.308869348 -0500 EST m=+10.254831125 container start ea65b4c142a1ecc4c5acde13aead0deeca5cc4c6b5b06f3925f8cc41f46c26ab (image=localhost/podman-pause:5.3.1-1733097600, name=8c5a350fe43d-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.243893918 -0500 EST m=+10.189855760 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 04 11:32:56 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Jan 04 11:32:56 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Jan 04 11:32:56 managed-node2 kernel: veth1: entered allmulticast mode Jan 04 11:32:56 managed-node2 kernel: veth1: entered promiscuous mode Jan 04 11:32:56 managed-node2 NetworkManager[784]: [1736008376.3388] manager: (veth1): new Veth device (/org/freedesktop/NetworkManager/Devices/8) Jan 04 11:32:56 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Jan 04 11:32:56 managed-node2 kernel: podman1: port 2(veth1) entered forwarding state Jan 04 11:32:56 managed-node2 NetworkManager[784]: [1736008376.3445] device (veth1): carrier: link connected Jan 04 11:32:56 managed-node2 (udev-worker)[29263]: Network interface NamePolicy= disabled on kernel command line. Jan 04 11:32:56 managed-node2 systemd[1]: Started libpod-196161f20f831fa75ecc72ab533fa66e7dcf1074574fe2c3143d784148c1543a.scope - libcrun container. ░░ Subject: A start job for unit libpod-196161f20f831fa75ecc72ab533fa66e7dcf1074574fe2c3143d784148c1543a.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-196161f20f831fa75ecc72ab533fa66e7dcf1074574fe2c3143d784148c1543a.scope has finished successfully. ░░ ░░ The job identifier is 2511. Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.43783887 -0500 EST m=+10.383800650 container init 196161f20f831fa75ecc72ab533fa66e7dcf1074574fe2c3143d784148c1543a (image=localhost/podman-pause:5.3.1-1733097600, name=a93b7ede24ca-infra, pod_id=a93b7ede24ca41ba847ce3cf58cc729dffc746701987f6d92abdaf7b8833d80d, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, io.buildah.version=1.38.0) Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.440103958 -0500 EST m=+10.386065808 container start 196161f20f831fa75ecc72ab533fa66e7dcf1074574fe2c3143d784148c1543a (image=localhost/podman-pause:5.3.1-1733097600, name=a93b7ede24ca-infra, pod_id=a93b7ede24ca41ba847ce3cf58cc729dffc746701987f6d92abdaf7b8833d80d, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, io.buildah.version=1.38.0) Jan 04 11:32:56 managed-node2 systemd[1]: Started libpod-cba46159e385d5b6de190f637934a5720bac746a6ef2ab1573d80109f1bbe7f1.scope - libcrun container. 
░░ Subject: A start job for unit libpod-cba46159e385d5b6de190f637934a5720bac746a6ef2ab1573d80109f1bbe7f1.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-cba46159e385d5b6de190f637934a5720bac746a6ef2ab1573d80109f1bbe7f1.scope has finished successfully. ░░ ░░ The job identifier is 2518. Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.472293597 -0500 EST m=+10.418255355 container init cba46159e385d5b6de190f637934a5720bac746a6ef2ab1573d80109f1bbe7f1 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=a93b7ede24ca41ba847ce3cf58cc729dffc746701987f6d92abdaf7b8833d80d, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.474487097 -0500 EST m=+10.420449136 container start cba46159e385d5b6de190f637934a5720bac746a6ef2ab1573d80109f1bbe7f1 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=a93b7ede24ca41ba847ce3cf58cc729dffc746701987f6d92abdaf7b8833d80d, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.477489212 -0500 EST m=+10.423450951 pod start a93b7ede24ca41ba847ce3cf58cc729dffc746701987f6d92abdaf7b8833d80d (image=, name=httpd3) Jan 04 11:32:56 managed-node2 podman[29219]: Pod: Jan 04 11:32:56 managed-node2 podman[29219]: a93b7ede24ca41ba847ce3cf58cc729dffc746701987f6d92abdaf7b8833d80d Jan 04 11:32:56 managed-node2 podman[29219]: Container: Jan 04 11:32:56 managed-node2 podman[29219]: cba46159e385d5b6de190f637934a5720bac746a6ef2ab1573d80109f1bbe7f1 Jan 04 11:32:56 managed-node2 systemd[1]: Started podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished successfully. ░░ ░░ The job identifier is 2411. Jan 04 11:32:57 managed-node2 sudo[29469]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zxufrowjytjuwrcddqoyejjgfpptwedl ; /usr/bin/python3.12 /var/tmp/ansible-tmp-1736008377.0993202-16884-78670723418786/AnsiballZ_command.py' Jan 04 11:32:57 managed-node2 sudo[29469]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-29469) opened. Jan 04 11:32:57 managed-node2 sudo[29469]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jan 04 11:32:57 managed-node2 python3.12[29472]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:32:57 managed-node2 systemd[23226]: Started podman-29480.scope. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 112. Jan 04 11:32:57 managed-node2 sudo[29469]: pam_unix(sudo:session): session closed for user podman_basic_user Jan 04 11:32:57 managed-node2 python3.12[29620]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:32:58 managed-node2 python3.12[29759]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:32:58 managed-node2 sudo[29940]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ktbgkgbcavztiutgtmblxyqskehvuryj ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736008378.5466173-16967-12676392522251/AnsiballZ_command.py' Jan 04 11:32:58 managed-node2 sudo[29940]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-29940) opened. Jan 04 11:32:58 managed-node2 sudo[29940]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jan 04 11:32:58 managed-node2 python3.12[29943]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --user list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd1[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:32:58 managed-node2 sudo[29940]: pam_unix(sudo:session): session closed for user podman_basic_user Jan 04 11:32:59 managed-node2 python3.12[30077]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd2[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:32:59 managed-node2 python3.12[30211]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd3[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:33:00 managed-node2 python3.12[30345]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15001/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None 
setype=None attributes=None Jan 04 11:33:00 managed-node2 python3.12[30478]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15002/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:33:01 managed-node2 python3.12[30609]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:33:01 managed-node2 python3.12[30741]: ansible-file Invoked with path=/etc/containers/storage.conf state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:33:02 managed-node2 python3.12[30872]: ansible-file Invoked with path=/tmp/lsr_83t3r8ir_podman state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:33:04 managed-node2 python3.12[31046]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jan 04 11:33:05 managed-node2 python3.12[31219]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:33:06 managed-node2 python3.12[31350]: ansible-ansible.legacy.dnf Invoked with name=['python3-pyasn1', 'python3-cryptography', 'python3-dbus'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 04 11:33:08 managed-node2 python3.12[31486]: ansible-ansible.legacy.dnf Invoked with name=['certmonger', 'python3-packaging'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None 
list=None nobest=None releasever=None Jan 04 11:33:09 managed-node2 dbus-broker-launch[633]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 04 11:33:09 managed-node2 dbus-broker-launch[23700]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 04 11:33:09 managed-node2 dbus-broker-launch[633]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 04 11:33:09 managed-node2 dbus-broker-launch[23700]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Jan 04 11:33:09 managed-node2 dbus-broker-launch[23700]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Jan 04 11:33:09 managed-node2 dbus-broker-launch[633]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. 
░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 04 11:33:09 managed-node2 dbus-broker-launch[23700]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 04 11:33:09 managed-node2 dbus-broker-launch[23700]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Jan 04 11:33:09 managed-node2 dbus-broker-launch[23700]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Jan 04 11:33:09 managed-node2 systemd[1]: Reload requested from client PID 31494 ('systemctl') (unit session-5.scope)... Jan 04 11:33:09 managed-node2 systemd[1]: Reloading... Jan 04 11:33:09 managed-node2 systemd[1]: Reloading finished in 213 ms. Jan 04 11:33:10 managed-node2 systemd[1]: Started run-rea937341daa74dcebd9e99f7e419d30e.service - /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-rea937341daa74dcebd9e99f7e419d30e.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-rea937341daa74dcebd9e99f7e419d30e.service has finished successfully. ░░ ░░ The job identifier is 2529. Jan 04 11:33:10 managed-node2 systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 2607. Jan 04 11:33:10 managed-node2 systemd[1]: Reload requested from client PID 31557 ('systemctl') (unit session-5.scope)... Jan 04 11:33:10 managed-node2 systemd[1]: Reloading... Jan 04 11:33:10 managed-node2 systemd[1]: Reloading finished in 345 ms. Jan 04 11:33:10 managed-node2 systemd[1]: Queuing reload/restart jobs for marked units… Jan 04 11:33:11 managed-node2 systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Jan 04 11:33:11 managed-node2 systemd[1]: Finished man-db-cache-update.service. 
░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 2607. Jan 04 11:33:11 managed-node2 systemd[1]: run-rea937341daa74dcebd9e99f7e419d30e.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-rea937341daa74dcebd9e99f7e419d30e.service has successfully entered the 'dead' state. Jan 04 11:33:11 managed-node2 python3.12[31750]: ansible-file Invoked with name=/etc/certmonger//pre-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//pre-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:33:12 managed-node2 python3.12[31881]: ansible-file Invoked with name=/etc/certmonger//post-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//post-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:33:13 managed-node2 python3.12[32012]: ansible-ansible.legacy.systemd Invoked with name=certmonger state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 04 11:33:13 managed-node2 systemd[1]: Reload requested from client PID 32015 ('systemctl') (unit session-5.scope)... Jan 04 11:33:13 managed-node2 systemd[1]: Reloading... Jan 04 11:33:13 managed-node2 systemd[1]: Reloading finished in 210 ms. Jan 04 11:33:13 managed-node2 systemd[1]: Starting certmonger.service - Certificate monitoring and PKI enrollment... ░░ Subject: A start job for unit certmonger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit certmonger.service has begun execution. ░░ ░░ The job identifier is 2685. Jan 04 11:33:13 managed-node2 (rtmonger)[32072]: certmonger.service: Referenced but unset environment variable evaluates to an empty string: OPTS Jan 04 11:33:13 managed-node2 systemd[1]: Started certmonger.service - Certificate monitoring and PKI enrollment. ░░ Subject: A start job for unit certmonger.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit certmonger.service has finished successfully. ░░ ░░ The job identifier is 2685. 
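With certmonger running, the certificate role's next step (the certificate_request call below) is tracked by the daemon under /var/lib/certmonger/requests/. A quick manual cross-check, purely as a sketch:

systemctl is-active certmonger.service
getcert list                       # tracked requests; the quadlet_demo certificate appears here once issued
ls /var/lib/certmonger/requests/   # raw request state files, e.g. 20250104163314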
Jan 04 11:33:14 managed-node2 python3.12[32230]: ansible-fedora.linux_system_roles.certificate_request Invoked with name=quadlet_demo dns=['localhost'] directory=/etc/pki/tls wait=True ca=self-sign __header=# # Ansible managed # # system_role:certificate provider_config_directory=/etc/certmonger provider=certmonger key_usage=['digitalSignature', 'keyEncipherment'] extended_key_usage=['id-kp-serverAuth', 'id-kp-clientAuth'] auto_renew=True ip=None email=None common_name=None country=None state=None locality=None organization=None organizational_unit=None contact_email=None key_size=None owner=None group=None mode=None principal=None run_before=None run_after=None Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 rsyslogd[662]: imjournal: journal files changed, reloading... [v8.2408.0-2.el10 try https://www.rsyslog.com/e/0 ] Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 
11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32246]: Certificate in file "/etc/pki/tls/certs/quadlet_demo.crt" issued by CA and saved. Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 python3.12[32377]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt Jan 04 11:33:15 managed-node2 python3.12[32508]: ansible-slurp Invoked with path=/etc/pki/tls/private/quadlet_demo.key src=/etc/pki/tls/private/quadlet_demo.key Jan 04 11:33:15 managed-node2 python3.12[32639]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt Jan 04 11:33:16 managed-node2 python3.12[32770]: ansible-ansible.legacy.command Invoked with _raw_params=getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:33:16 managed-node2 certmonger[32072]: 2025-01-04 11:33:16 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:16 managed-node2 python3.12[32902]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:33:17 managed-node2 python3.12[33033]: ansible-file Invoked with path=/etc/pki/tls/private/quadlet_demo.key state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:33:17 managed-node2 python3.12[33164]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None 
attributes=None Jan 04 11:33:18 managed-node2 python3.12[33295]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:33:18 managed-node2 python3.12[33426]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:33:20 managed-node2 python3.12[33688]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:33:21 managed-node2 python3.12[33825]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jan 04 11:33:22 managed-node2 python3.12[33957]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:33:24 managed-node2 python3.12[34090]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:33:24 managed-node2 python3.12[34221]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:33:25 managed-node2 python3.12[34352]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 04 11:33:26 managed-node2 python3.12[34484]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 04 11:33:26 managed-node2 python3.12[34617]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 04 11:33:27 managed-node2 python3.12[34750]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 04 11:33:28 managed-node2 python3.12[34881]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 04 11:33:33 
managed-node2 python3.12[35488]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:33:34 managed-node2 python3.12[35621]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:33:34 managed-node2 python3.12[35752]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.network follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 04 11:33:35 managed-node2 python3.12[35857]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736008414.7326126-18841-108046773840670/.source.network dest=/etc/containers/systemd/quadlet-demo.network owner=root group=0 mode=0644 _original_basename=quadlet-demo.network follow=False checksum=e57c08d49aff4bae8daab138d913aeddaa8682a0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:33:36 managed-node2 python3.12[35988]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 04 11:33:36 managed-node2 systemd[1]: Reload requested from client PID 35989 ('systemctl') (unit session-5.scope)... Jan 04 11:33:36 managed-node2 systemd[1]: Reloading... Jan 04 11:33:36 managed-node2 systemd[1]: Reloading finished in 213 ms. Jan 04 11:33:36 managed-node2 python3.12[36175]: ansible-systemd Invoked with name=quadlet-demo-network.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 04 11:33:36 managed-node2 systemd[1]: Starting quadlet-demo-network.service... ░░ Subject: A start job for unit quadlet-demo-network.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-network.service has begun execution. ░░ ░░ The job identifier is 2764. Jan 04 11:33:36 managed-node2 quadlet-demo-network[36179]: systemd-quadlet-demo Jan 04 11:33:36 managed-node2 systemd[1]: Finished quadlet-demo-network.service. ░░ Subject: A start job for unit quadlet-demo-network.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-network.service has finished successfully. ░░ ░░ The job identifier is 2764. 
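quadlet-demo-network.service is a unit generated from /etc/containers/systemd/quadlet-demo.network; it runs once and prints the network it created (systemd-quadlet-demo above). A hedged way to inspect the generated unit and the result (the quadlet generator usually lives under /usr/libexec/podman, but the path can vary by distribution):

/usr/libexec/podman/quadlet -dryrun 2>&1 | grep -A5 -i network
systemctl cat quadlet-demo-network.service
podman network inspect systemd-quadlet-demo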
Jan 04 11:33:37 managed-node2 python3.12[36317]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:33:39 managed-node2 python3.12[36450]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:33:40 managed-node2 python3.12[36581]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.volume follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 04 11:33:40 managed-node2 python3.12[36686]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736008419.8707628-19085-91622550066190/.source.volume dest=/etc/containers/systemd/quadlet-demo-mysql.volume owner=root group=0 mode=0644 _original_basename=quadlet-demo-mysql.volume follow=False checksum=585f8cbdf0ec73000f9227dcffbef71e9552ea4a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:33:41 managed-node2 python3.12[36817]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 04 11:33:41 managed-node2 systemd[1]: Reload requested from client PID 36818 ('systemctl') (unit session-5.scope)... Jan 04 11:33:41 managed-node2 systemd[1]: Reloading... Jan 04 11:33:41 managed-node2 systemd[1]: Reloading finished in 220 ms. Jan 04 11:33:41 managed-node2 python3.12[37004]: ansible-systemd Invoked with name=quadlet-demo-mysql-volume.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 04 11:33:41 managed-node2 systemd[1]: Starting quadlet-demo-mysql-volume.service... ░░ Subject: A start job for unit quadlet-demo-mysql-volume.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql-volume.service has begun execution. ░░ ░░ The job identifier is 2848. Jan 04 11:33:42 managed-node2 podman[37008]: 2025-01-04 11:33:42.030547669 -0500 EST m=+0.026014171 volume create systemd-quadlet-demo-mysql Jan 04 11:33:42 managed-node2 quadlet-demo-mysql-volume[37008]: systemd-quadlet-demo-mysql Jan 04 11:33:42 managed-node2 systemd[1]: Finished quadlet-demo-mysql-volume.service. ░░ Subject: A start job for unit quadlet-demo-mysql-volume.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql-volume.service has finished successfully. ░░ ░░ The job identifier is 2848. 
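The .volume unit behaves the same way: quadlet-demo-mysql-volume.service runs once, creates the named volume, and exits. Verifying it directly:

podman volume inspect systemd-quadlet-demo-mysql
systemctl status quadlet-demo-mysql-volume.service --no-pager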
Jan 04 11:33:43 managed-node2 python3.12[37146]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:33:44 managed-node2 python3.12[37279]: ansible-file Invoked with path=/tmp/quadlet_demo state=directory owner=root group=root mode=0777 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:33:51 managed-node2 podman[37418]: 2025-01-04 11:33:51.494779884 -0500 EST m=+5.942509700 image pull dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6 Jan 04 11:33:51 managed-node2 python3.12[37729]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:33:52 managed-node2 python3.12[37860]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 04 11:33:52 managed-node2 python3.12[37965]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/systemd/quadlet-demo-mysql.container owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736008432.0441234-19575-189994622198139/.source.container _original_basename=.c2vzggou follow=False checksum=ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:33:53 managed-node2 python3.12[38096]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 04 11:33:53 managed-node2 systemd[1]: Reload requested from client PID 38097 ('systemctl') (unit session-5.scope)... Jan 04 11:33:53 managed-node2 systemd[1]: Reloading... Jan 04 11:33:53 managed-node2 systemd[1]: Reloading finished in 217 ms. Jan 04 11:33:53 managed-node2 python3.12[38283]: ansible-systemd Invoked with name=quadlet-demo-mysql.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 04 11:33:53 managed-node2 systemd[1]: Starting quadlet-demo-mysql.service... ░░ Subject: A start job for unit quadlet-demo-mysql.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql.service has begun execution. ░░ ░░ The job identifier is 2932. 
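The copied quadlet-demo-mysql.container file is what systemd is now turning into quadlet-demo-mysql.service. The test's real file is not reproduced in this log (it also wires up a health check, as the timer further down shows); the following is only an illustrative sketch of the shape such a unit takes:

# Illustrative only -- not the actual quadlet-demo-mysql.container used by the test.
cat > /etc/containers/systemd/quadlet-demo-mysql.container <<'EOF'
[Container]
Image=quay.io/linux-system-roles/mysql:5.6
ContainerName=quadlet-demo-mysql
Volume=quadlet-demo-mysql.volume:/var/lib/mysql
Network=quadlet-demo.network

[Install]
WantedBy=multi-user.target
EOF
systemctl daemon-reload
systemctl start quadlet-demo-mysql.service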
Jan 04 11:33:54 managed-node2 podman[38287]: 2025-01-04 11:33:54.050588933 -0500 EST m=+0.038398037 container create 1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 04 11:33:54 managed-node2 NetworkManager[784]: [1736008434.0751] manager: (podman2): new Bridge device (/org/freedesktop/NetworkManager/Devices/9) Jan 04 11:33:54 managed-node2 kernel: podman2: port 1(veth2) entered blocking state Jan 04 11:33:54 managed-node2 kernel: podman2: port 1(veth2) entered disabled state Jan 04 11:33:54 managed-node2 kernel: veth2: entered allmulticast mode Jan 04 11:33:54 managed-node2 kernel: veth2: entered promiscuous mode Jan 04 11:33:54 managed-node2 kernel: podman2: port 1(veth2) entered blocking state Jan 04 11:33:54 managed-node2 kernel: podman2: port 1(veth2) entered forwarding state Jan 04 11:33:54 managed-node2 NetworkManager[784]: [1736008434.0837] manager: (veth2): new Veth device (/org/freedesktop/NetworkManager/Devices/10) Jan 04 11:33:54 managed-node2 NetworkManager[784]: [1736008434.0869] device (veth2): carrier: link connected Jan 04 11:33:54 managed-node2 NetworkManager[784]: [1736008434.0871] device (podman2): carrier: link connected Jan 04 11:33:54 managed-node2 (udev-worker)[38303]: Network interface NamePolicy= disabled on kernel command line. Jan 04 11:33:54 managed-node2 (udev-worker)[38302]: Network interface NamePolicy= disabled on kernel command line. Jan 04 11:33:54 managed-node2 NetworkManager[784]: [1736008434.1252] device (podman2): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jan 04 11:33:54 managed-node2 NetworkManager[784]: [1736008434.1263] device (podman2): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jan 04 11:33:54 managed-node2 NetworkManager[784]: [1736008434.1271] device (podman2): Activation: starting connection 'podman2' (7cd99f47-f94f-4713-adfd-4d35815d93d3) Jan 04 11:33:54 managed-node2 NetworkManager[784]: [1736008434.1272] device (podman2): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jan 04 11:33:54 managed-node2 NetworkManager[784]: [1736008434.1286] device (podman2): state change: prepare -> config (reason 'none', managed-type: 'external') Jan 04 11:33:54 managed-node2 NetworkManager[784]: [1736008434.1289] device (podman2): state change: config -> ip-config (reason 'none', managed-type: 'external') Jan 04 11:33:54 managed-node2 NetworkManager[784]: [1736008434.1293] device (podman2): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jan 04 11:33:54 managed-node2 podman[38287]: 2025-01-04 11:33:54.036000448 -0500 EST m=+0.023809823 image pull dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6 Jan 04 11:33:54 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 3019. Jan 04 11:33:54 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. 
░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 3019. Jan 04 11:33:54 managed-node2 NetworkManager[784]: [1736008434.1741] device (podman2): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jan 04 11:33:54 managed-node2 NetworkManager[784]: [1736008434.1744] device (podman2): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jan 04 11:33:54 managed-node2 NetworkManager[784]: [1736008434.1752] device (podman2): Activation: successful, device activated. Jan 04 11:33:54 managed-node2 systemd[1]: Started 1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e-1404942fa2db9863.timer - /usr/bin/podman healthcheck run 1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e. ░░ Subject: A start job for unit 1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e-1404942fa2db9863.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit 1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e-1404942fa2db9863.timer has finished successfully. ░░ ░░ The job identifier is 3098. Jan 04 11:33:54 managed-node2 podman[38287]: 2025-01-04 11:33:54.224823382 -0500 EST m=+0.212632600 container init 1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 04 11:33:54 managed-node2 systemd[1]: Started quadlet-demo-mysql.service. ░░ Subject: A start job for unit quadlet-demo-mysql.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql.service has finished successfully. ░░ ░░ The job identifier is 2932. 
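The transient *.timer unit above is what drives the periodic container health_status events that follow. Spot-checking the health state by hand, as a sketch (the inspect field name has shifted between podman versions):

podman healthcheck run quadlet-demo-mysql && echo healthy
podman inspect quadlet-demo-mysql --format '{{.State.Health.Status}}'
systemctl list-timers --all | grep 1ce48534b65f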
Jan 04 11:33:54 managed-node2 podman[38287]: 2025-01-04 11:33:54.279365523 -0500 EST m=+0.267174828 container start 1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 04 11:33:54 managed-node2 quadlet-demo-mysql[38287]: 1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e Jan 04 11:33:54 managed-node2 podman[38351]: 2025-01-04 11:33:54.409100651 -0500 EST m=+0.124781707 container health_status 1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=0, health_log=, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 04 11:33:55 managed-node2 python3.12[38541]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:33:56 managed-node2 python3.12[38685]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:33:56 managed-node2 python3.12[38816]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/envoy-proxy-configmap.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 04 11:33:57 managed-node2 python3.12[38921]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736008436.5920036-19781-220862317646466/.source.yml dest=/etc/containers/systemd/envoy-proxy-configmap.yml owner=root group=0 mode=0644 _original_basename=envoy-proxy-configmap.yml follow=False checksum=d681c7d56f912150d041873e880818b22a90c188 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:33:57 managed-node2 python3.12[39076]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 04 11:33:57 managed-node2 systemd[1]: Reload requested from client PID 39077 ('systemctl') (unit session-5.scope)... Jan 04 11:33:57 managed-node2 systemd[1]: Reloading... Jan 04 11:33:58 managed-node2 systemd[1]: Reloading finished in 229 ms. 
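envoy-proxy-configmap.yml is not itself turned into a unit: Quadlet only generates services from *.container, *.volume, *.network, *.kube (and similar) files, while plain *.yml files are consumed only when another unit references them, typically via ConfigMap= in a .kube unit as happens further down. One way to see which drop-ins actually became units:

ls /etc/containers/systemd/
systemctl list-unit-files 'quadlet-demo*' --no-pager   # generated units are listed with state "generated"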
Jan 04 11:33:58 managed-node2 python3.12[39264]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:34:00 managed-node2 python3.12[39425]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:34:00 managed-node2 python3.12[39565]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 04 11:34:01 managed-node2 python3.12[39670]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/systemd/quadlet-demo.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736008440.4832172-19964-76184447797368/.source.yml _original_basename=.izqh1yla follow=False checksum=998dccde0483b1654327a46ddd89cbaa47650370 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:34:02 managed-node2 python3.12[39801]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 04 11:34:02 managed-node2 systemd[1]: Reload requested from client PID 39809 ('systemctl') (unit session-5.scope)... Jan 04 11:34:02 managed-node2 systemd[1]: Reloading... Jan 04 11:34:02 managed-node2 systemd[1]: Reloading finished in 231 ms. Jan 04 11:34:03 managed-node2 python3.12[39997]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:34:04 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. 
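The recurring stat of /usr/bin/getsubids is the podman role probing whether subordinate-ID lookups are available before deciding how to validate rootless users such as podman_basic_user. Roughly the checks it builds on (a sketch, not the role's exact commands):

getsubids podman_basic_user      # subuid ranges (shadow-utils)
getsubids -g podman_basic_user   # subgid ranges
grep podman_basic_user /etc/subuid /etc/subgid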
Jan 04 11:34:04 managed-node2 python3.12[40142]: ansible-slurp Invoked with path=/etc/containers/systemd/quadlet-demo.yml src=/etc/containers/systemd/quadlet-demo.yml Jan 04 11:34:05 managed-node2 python3.12[40285]: ansible-file Invoked with path=/tmp/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:34:05 managed-node2 python3.12[40416]: ansible-file Invoked with path=/tmp/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:34:18 managed-node2 podman[40555]: 2025-01-04 11:34:18.155808556 -0500 EST m=+12.224013735 image pull fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache Jan 04 11:34:22 managed-node2 podman[40973]: 2025-01-04 11:34:22.482748384 -0500 EST m=+3.852421401 image pull 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d quay.io/linux-system-roles/envoyproxy:v1.25.0 Jan 04 11:34:22 managed-node2 python3.12[41236]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:34:23 managed-node2 python3.12[41367]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.kube follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 04 11:34:23 managed-node2 python3.12[41472]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736008463.0030222-20502-2952385679144/.source.kube dest=/etc/containers/systemd/quadlet-demo.kube owner=root group=0 mode=0644 _original_basename=quadlet-demo.kube follow=False checksum=7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:34:24 managed-node2 python3.12[41603]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 04 11:34:24 managed-node2 systemd[1]: Reload requested from client PID 41604 ('systemctl') (unit session-5.scope)... Jan 04 11:34:24 managed-node2 systemd[1]: Reloading... Jan 04 11:34:24 managed-node2 systemd[1]: Reloading finished in 221 ms. 
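quadlet-demo.kube is the piece that ties the earlier drop-ins together: it points Quadlet at the Kubernetes YAML and the config map, and becomes quadlet-demo.service after the daemon-reload. The test's actual file is not shown in this log; the sketch below only illustrates the [Kube] keys involved (the container-side ports are placeholders, only the host ports 8000 and 9000 are known from the firewall task earlier):

# Illustrative sketch only -- not the actual quadlet-demo.kube used by the test.
cat > /etc/containers/systemd/quadlet-demo.kube <<'EOF'
[Kube]
Yaml=/etc/containers/systemd/quadlet-demo.yml
ConfigMap=/etc/containers/systemd/envoy-proxy-configmap.yml
Network=quadlet-demo.network
# Host ports match the firewall task; container ports below are placeholders.
PublishPort=8000:8080
PublishPort=9000:9901

[Install]
WantedBy=multi-user.target
EOF
systemctl daemon-reload && systemctl start quadlet-demo.service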
Jan 04 11:34:24 managed-node2 podman[41768]: 2025-01-04 11:34:24.757102792 -0500 EST m=+0.122146579 container health_status 1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=0, health_log=, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 04 11:34:24 managed-node2 python3.12[41797]: ansible-systemd Invoked with name=quadlet-demo.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 04 11:34:24 managed-node2 systemd[1]: Starting quadlet-demo.service... ░░ Subject: A start job for unit quadlet-demo.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo.service has begun execution. ░░ ░░ The job identifier is 3332. Jan 04 11:34:25 managed-node2 quadlet-demo[41809]: Pods stopped: Jan 04 11:34:25 managed-node2 quadlet-demo[41809]: Pods removed: Jan 04 11:34:25 managed-node2 quadlet-demo[41809]: Secrets removed: Jan 04 11:34:25 managed-node2 quadlet-demo[41809]: Volumes removed: Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.013772373 -0500 EST m=+0.028358319 volume create wp-pv-claim Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.039427953 -0500 EST m=+0.054013906 container create f07164ee3e905df54a70f59e80d3960832e74eb9830cb83662de5ad55e2abf12 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.048209442 -0500 EST m=+0.062795385 volume create envoy-proxy-config Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.054725023 -0500 EST m=+0.069310938 volume create envoy-certificates Jan 04 11:34:25 managed-node2 systemd[1]: Created slice machine-libpod_pod_56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3.slice - cgroup machine-libpod_pod_56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3.slice. ░░ Subject: A start job for unit machine-libpod_pod_56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3.slice has finished successfully. ░░ ░░ The job identifier is 3419. 
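The ansible-systemd call above maps directly onto a systemd task; once the generator has produced quadlet-demo.service, starting it is an ordinary unit operation. A sketch using the same parameters as the logged invocation:

- hosts: managed-node2
  become: true
  tasks:
    - name: Start the quadlet-generated service
      ansible.builtin.systemd:
        name: quadlet-demo.service
        scope: system
        state: started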
Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.099796146 -0500 EST m=+0.114382082 container create 65b9ce253bc156b6f3d547f7ab92379b31509f467860bb1e1e8fb7619c76b9b3 (image=localhost/podman-pause:5.3.1-1733097600, name=56bbf7fc2aa4-infra, pod_id=56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.106000364 -0500 EST m=+0.120586274 pod create 56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3 (image=, name=quadlet-demo) Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.143568363 -0500 EST m=+0.158154412 container create 99436d65d89af03f14af27dcb71779c19cd57b99f6b1b50d15c2382093ca4c08 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.174829744 -0500 EST m=+0.189415811 container create 23c3f79e2bc8df0b3cda71af6c29855118c1459ae9e282ca023ba08f99e48001 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.175162259 -0500 EST m=+0.189748206 container restart f07164ee3e905df54a70f59e80d3960832e74eb9830cb83662de5ad55e2abf12 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 04 11:34:25 managed-node2 systemd[23226]: Starting grub-boot-success.service - Mark boot as successful... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 118. Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.11220576 -0500 EST m=+0.126791876 image pull fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.147419493 -0500 EST m=+0.162005572 image pull 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d quay.io/linux-system-roles/envoyproxy:v1.25.0 Jan 04 11:34:25 managed-node2 systemd[1]: Started libpod-f07164ee3e905df54a70f59e80d3960832e74eb9830cb83662de5ad55e2abf12.scope - libcrun container. ░░ Subject: A start job for unit libpod-f07164ee3e905df54a70f59e80d3960832e74eb9830cb83662de5ad55e2abf12.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-f07164ee3e905df54a70f59e80d3960832e74eb9830cb83662de5ad55e2abf12.scope has finished successfully. ░░ ░░ The job identifier is 3425. Jan 04 11:34:25 managed-node2 systemd[23226]: Finished grub-boot-success.service - Mark boot as successful. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 118. 
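At this point podman has created the quadlet-demo pod together with its infra, wordpress, and envoy containers. A quick follow-up check (illustrative, not part of the test) is to inspect the pod by the name shown in the pod create event above:

- hosts: managed-node2
  become: true
  tasks:
    - name: Inspect the pod created from the Kube YAML
      ansible.builtin.command: podman pod inspect quadlet-demo
      register: quadlet_pod_inspect
      changed_when: false

    - name: Show the inspect output
      ansible.builtin.debug:
        var: quadlet_pod_inspect.stdout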
Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.218300069 -0500 EST m=+0.232886160 container init f07164ee3e905df54a70f59e80d3960832e74eb9830cb83662de5ad55e2abf12 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.220459012 -0500 EST m=+0.235045097 container start f07164ee3e905df54a70f59e80d3960832e74eb9830cb83662de5ad55e2abf12 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 04 11:34:25 managed-node2 kernel: podman2: port 2(veth3) entered blocking state Jan 04 11:34:25 managed-node2 kernel: podman2: port 2(veth3) entered disabled state Jan 04 11:34:25 managed-node2 kernel: veth3: entered allmulticast mode Jan 04 11:34:25 managed-node2 kernel: veth3: entered promiscuous mode Jan 04 11:34:25 managed-node2 kernel: podman2: port 2(veth3) entered blocking state Jan 04 11:34:25 managed-node2 kernel: podman2: port 2(veth3) entered forwarding state Jan 04 11:34:25 managed-node2 NetworkManager[784]: [1736008465.2503] manager: (veth3): new Veth device (/org/freedesktop/NetworkManager/Devices/11) Jan 04 11:34:25 managed-node2 NetworkManager[784]: [1736008465.2541] device (veth3): carrier: link connected Jan 04 11:34:25 managed-node2 (udev-worker)[41833]: Network interface NamePolicy= disabled on kernel command line. Jan 04 11:34:25 managed-node2 systemd[1]: Started libpod-65b9ce253bc156b6f3d547f7ab92379b31509f467860bb1e1e8fb7619c76b9b3.scope - libcrun container. ░░ Subject: A start job for unit libpod-65b9ce253bc156b6f3d547f7ab92379b31509f467860bb1e1e8fb7619c76b9b3.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-65b9ce253bc156b6f3d547f7ab92379b31509f467860bb1e1e8fb7619c76b9b3.scope has finished successfully. ░░ ░░ The job identifier is 3432. Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.335532288 -0500 EST m=+0.350118287 container init 65b9ce253bc156b6f3d547f7ab92379b31509f467860bb1e1e8fb7619c76b9b3 (image=localhost/podman-pause:5.3.1-1733097600, name=56bbf7fc2aa4-infra, pod_id=56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.337740363 -0500 EST m=+0.352326472 container start 65b9ce253bc156b6f3d547f7ab92379b31509f467860bb1e1e8fb7619c76b9b3 (image=localhost/podman-pause:5.3.1-1733097600, name=56bbf7fc2aa4-infra, pod_id=56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Jan 04 11:34:25 managed-node2 systemd[1]: Started libpod-99436d65d89af03f14af27dcb71779c19cd57b99f6b1b50d15c2382093ca4c08.scope - libcrun container. ░░ Subject: A start job for unit libpod-99436d65d89af03f14af27dcb71779c19cd57b99f6b1b50d15c2382093ca4c08.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-99436d65d89af03f14af27dcb71779c19cd57b99f6b1b50d15c2382093ca4c08.scope has finished successfully. ░░ ░░ The job identifier is 3439. 
Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.389982183 -0500 EST m=+0.404568172 container init 99436d65d89af03f14af27dcb71779c19cd57b99f6b1b50d15c2382093ca4c08 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.392369406 -0500 EST m=+0.406955405 container start 99436d65d89af03f14af27dcb71779c19cd57b99f6b1b50d15c2382093ca4c08 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 04 11:34:25 managed-node2 systemd[1]: Started libpod-23c3f79e2bc8df0b3cda71af6c29855118c1459ae9e282ca023ba08f99e48001.scope - libcrun container. ░░ Subject: A start job for unit libpod-23c3f79e2bc8df0b3cda71af6c29855118c1459ae9e282ca023ba08f99e48001.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-23c3f79e2bc8df0b3cda71af6c29855118c1459ae9e282ca023ba08f99e48001.scope has finished successfully. ░░ ░░ The job identifier is 3446. Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.459204733 -0500 EST m=+0.473790707 container init 23c3f79e2bc8df0b3cda71af6c29855118c1459ae9e282ca023ba08f99e48001 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.461460007 -0500 EST m=+0.476046011 container start 23c3f79e2bc8df0b3cda71af6c29855118c1459ae9e282ca023ba08f99e48001 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.468364484 -0500 EST m=+0.482950426 pod start 56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3 (image=, name=quadlet-demo) Jan 04 11:34:25 managed-node2 systemd[1]: Started quadlet-demo.service. ░░ Subject: A start job for unit quadlet-demo.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo.service has finished successfully. ░░ ░░ The job identifier is 3332. 
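With quadlet-demo.service reported as started, the remainder of the test repeatedly fetches https://localhost:8000 (the get_url invocations below) and then lists containers, pods, and units. That polling pattern can be expressed as a single retried task; this is a rough sketch only, and the retries/delay values are assumptions, while the URL, dest, and validate_certs settings come from the log.

- hosts: managed-node2
  become: true
  tasks:
    - name: Wait until the envoy front end answers over HTTPS
      ansible.builtin.get_url:
        url: https://localhost:8000
        dest: /run/out
        mode: "0600"
        validate_certs: false
      register: quadlet_front_end
      until: quadlet_front_end is succeeded
      retries: 6
      delay: 5

    - name: List the pods with their containers, as the test does
      ansible.builtin.command: podman pod ps --ctr-ids --ctr-names --ctr-status
      changed_when: false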
Jan 04 11:34:25 managed-node2 quadlet-demo[41809]: Volumes: Jan 04 11:34:25 managed-node2 quadlet-demo[41809]: wp-pv-claim Jan 04 11:34:25 managed-node2 quadlet-demo[41809]: Pod: Jan 04 11:34:25 managed-node2 quadlet-demo[41809]: 56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3 Jan 04 11:34:25 managed-node2 quadlet-demo[41809]: Containers: Jan 04 11:34:25 managed-node2 quadlet-demo[41809]: 99436d65d89af03f14af27dcb71779c19cd57b99f6b1b50d15c2382093ca4c08 Jan 04 11:34:25 managed-node2 quadlet-demo[41809]: 23c3f79e2bc8df0b3cda71af6c29855118c1459ae9e282ca023ba08f99e48001 Jan 04 11:34:26 managed-node2 python3.12[42060]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /etc/containers/systemd _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:34:26 managed-node2 python3.12[42267]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps -a _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:34:26 managed-node2 python3.12[42466]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:34:27 managed-node2 python3.12[42610]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod ps --ctr-ids --ctr-names --ctr-status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:34:27 managed-node2 python3.12[42750]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail; systemctl list-units | grep quadlet _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:34:28 managed-node2 python3.12[42884]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:34:33 managed-node2 python3.12[43015]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:34:39 managed-node2 python3.12[43146]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True 
use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:34:44 managed-node2 python3.12[43277]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:34:50 managed-node2 python3.12[43408]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:34:55 managed-node2 podman[43517]: 2025-01-04 11:34:55.652880074 -0500 EST m=+0.122337576 container health_status 1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=0, health_log=, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 04 11:34:55 managed-node2 python3.12[43546]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:35:01 managed-node2 python3.12[43687]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:35:01 managed-node2 python3.12[43818]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None TASK [Check] ******************************************************************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:148 Saturday 04 January 2025 11:35:01 -0500 (0:00:00.603) 0:01:58.149 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "-a" ], "delta": "0:00:00.038459", "end": 
"2025-01-04 11:35:02.314065", "rc": 0, "start": "2025-01-04 11:35:02.275606" } STDOUT: CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES 23724cd2b98c localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes c8c001b59877-service dbc2b9607a4c localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes 0.0.0.0:15002->80/tcp ecaac7b287a8-infra f27f043c4df7 quay.io/libpod/testimage:20210610 2 minutes ago Up 2 minutes 0.0.0.0:15002->80/tcp httpd2-httpd2 ea65b4c142a1 localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes 8c5a350fe43d-service 196161f20f83 localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes 0.0.0.0:15003->80/tcp a93b7ede24ca-infra cba46159e385 quay.io/libpod/testimage:20210610 2 minutes ago Up 2 minutes 0.0.0.0:15003->80/tcp httpd3-httpd3 1ce48534b65f quay.io/linux-system-roles/mysql:5.6 mysqld About a minute ago Up About a minute (healthy) 3306/tcp quadlet-demo-mysql f07164ee3e90 localhost/podman-pause:5.3.1-1733097600 37 seconds ago Up 37 seconds a96f3a51b8d1-service 65b9ce253bc1 localhost/podman-pause:5.3.1-1733097600 37 seconds ago Up 37 seconds 0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp 56bbf7fc2aa4-infra 99436d65d89a quay.io/linux-system-roles/wordpress:4.8-apache apache2-foregroun... 37 seconds ago Up 37 seconds 0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp, 80/tcp quadlet-demo-wordpress 23c3f79e2bc8 quay.io/linux-system-roles/envoyproxy:v1.25.0 envoy -c /etc/env... 37 seconds ago Up 37 seconds 0.0.0.0:8000->8080/tcp, 0.0.0.0:9000->9901/tcp, 10000/tcp quadlet-demo-envoy TASK [Check pods] ************************************************************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:152 Saturday 04 January 2025 11:35:02 -0500 (0:00:00.517) 0:01:58.666 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "pod", "ps", "--ctr-ids", "--ctr-names", "--ctr-status" ], "delta": "0:00:00.038203", "end": "2025-01-04 11:35:02.807736", "failed_when_result": false, "rc": 0, "start": "2025-01-04 11:35:02.769533" } STDOUT: POD ID NAME STATUS CREATED INFRA ID IDS NAMES STATUS 56bbf7fc2aa4 quadlet-demo Running 37 seconds ago 65b9ce253bc1 65b9ce253bc1,99436d65d89a,23c3f79e2bc8 56bbf7fc2aa4-infra,quadlet-demo-wordpress,quadlet-demo-envoy running,running,running a93b7ede24ca httpd3 Running 2 minutes ago 196161f20f83 196161f20f83,cba46159e385 a93b7ede24ca-infra,httpd3-httpd3 running,running ecaac7b287a8 httpd2 Running 2 minutes ago dbc2b9607a4c dbc2b9607a4c,f27f043c4df7 ecaac7b287a8-infra,httpd2-httpd2 running,running TASK [Check systemd] *********************************************************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:157 Saturday 04 January 2025 11:35:02 -0500 (0:00:00.448) 0:01:59.115 ****** ok: [managed-node2] => { "changed": false, "cmd": "set -euo pipefail; systemctl list-units --all | grep quadlet", "delta": "0:00:00.014965", "end": "2025-01-04 11:35:03.190197", "failed_when_result": false, "rc": 0, "start": "2025-01-04 11:35:03.175232" } STDOUT: quadlet-demo-mysql-volume.service loaded active exited quadlet-demo-mysql-volume.service quadlet-demo-mysql.service loaded active running quadlet-demo-mysql.service quadlet-demo-network.service loaded active exited quadlet-demo-network.service quadlet-demo.service loaded active running quadlet-demo.service TASK [LS] ********************************************************************** task path: 
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:165 Saturday 04 January 2025 11:35:03 -0500 (0:00:00.391) 0:01:59.506 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "ls", "-alrtF", "/etc/systemd/system" ], "delta": "0:00:00.004124", "end": "2025-01-04 11:35:03.563773", "failed_when_result": false, "rc": 0, "start": "2025-01-04 11:35:03.559649" } STDOUT: total 12 drwxr-xr-x. 5 root root 47 Dec 20 02:21 ../ lrwxrwxrwx. 1 root root 43 Dec 20 02:21 dbus.service -> /usr/lib/systemd/system/dbus-broker.service drwxr-xr-x. 2 root root 32 Dec 20 02:21 getty.target.wants/ lrwxrwxrwx. 1 root root 37 Dec 20 02:21 ctrl-alt-del.target -> /usr/lib/systemd/system/reboot.target drwxr-xr-x. 2 root root 48 Dec 20 02:22 network-online.target.wants/ lrwxrwxrwx. 1 root root 57 Dec 20 02:22 dbus-org.freedesktop.nm-dispatcher.service -> /usr/lib/systemd/system/NetworkManager-dispatcher.service drwxr-xr-x. 2 root root 76 Dec 20 02:22 timers.target.wants/ drwxr-xr-x. 2 root root 38 Dec 20 02:22 dev-virtio\x2dports-org.qemu.guest_agent.0.device.wants/ lrwxrwxrwx. 1 root root 41 Dec 20 02:25 default.target -> /usr/lib/systemd/system/multi-user.target drwxr-xr-x. 2 root root 31 Dec 20 02:37 remote-fs.target.wants/ drwxr-xr-x. 2 root root 119 Dec 20 02:38 cloud-init.target.wants/ drwxr-xr-x. 2 root root 4096 Dec 20 02:38 sysinit.target.wants/ drwxr-xr-x. 2 root root 113 Jan 4 11:29 sockets.target.wants/ lrwxrwxrwx. 1 root root 41 Jan 4 11:29 dbus-org.fedoraproject.FirewallD1.service -> /usr/lib/systemd/system/firewalld.service drwxr-xr-x. 12 root root 4096 Jan 4 11:32 ./ drwxr-xr-x. 2 root root 162 Jan 4 11:32 default.target.wants/ drwxr-xr-x. 2 root root 4096 Jan 4 11:33 multi-user.target.wants/ TASK [Cleanup] ***************************************************************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:172 Saturday 04 January 2025 11:35:03 -0500 (0:00:00.351) 0:01:59.858 ****** included: fedora.linux_system_roles.podman for managed-node2 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 04 January 2025 11:35:03 -0500 (0:00:00.073) 0:01:59.932 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 04 January 2025 11:35:03 -0500 (0:00:00.087) 0:02:00.019 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 04 January 2025 11:35:03 -0500 (0:00:00.037) 0:02:00.057 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: 
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 04 January 2025 11:35:03 -0500 (0:00:00.030) 0:02:00.087 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Saturday 04 January 2025 11:35:03 -0500 (0:00:00.028) 0:02:00.116 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Saturday 04 January 2025 11:35:03 -0500 (0:00:00.030) 0:02:00.147 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 04 January 2025 11:35:03 -0500 (0:00:00.029) 0:02:00.176 ****** ok: [managed-node2] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 04 January 2025 11:35:04 -0500 (0:00:00.076) 0:02:00.253 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 04 January 2025 11:35:04 -0500 (0:00:00.739) 0:02:00.992 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 04 January 2025 11:35:04 -0500 (0:00:00.030) 0:02:01.022 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 04 January 2025 11:35:04 -0500 (0:00:00.032) 0:02:01.055 ****** skipping: [managed-node2] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 04 January 2025 11:35:04 -0500 (0:00:00.028) 0:02:01.084 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 04 January 2025 11:35:04 -0500 (0:00:00.029) 0:02:01.114 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 04 January 2025 11:35:04 -0500 (0:00:00.029) 0:02:01.144 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.024561", "end": "2025-01-04 11:35:05.233862", "rc": 0, "start": "2025-01-04 11:35:05.209301" } STDOUT: podman version 5.3.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 04 January 2025 11:35:05 -0500 (0:00:00.389) 0:02:01.534 ****** ok: [managed-node2] => { "ansible_facts": { "podman_version": "5.3.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 04 January 2025 11:35:05 -0500 (0:00:00.032) 0:02:01.566 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 04 January 2025 11:35:05 -0500 (0:00:00.029) 0:02:01.596 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: 
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Saturday 04 January 2025 11:35:05 -0500 (0:00:00.035) 0:02:01.631 ****** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 04 January 2025 11:35:05 -0500 (0:00:00.037) 0:02:01.669 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Saturday 04 January 2025 11:35:05 -0500 (0:00:00.051) 0:02:01.721 ****** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 04 January 2025 11:35:05 -0500 (0:00:00.087) 0:02:01.808 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 04 January 2025 11:35:05 -0500 (0:00:00.060) 0:02:01.868 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 04 January 2025 11:35:05 -0500 (0:00:00.034) 0:02:01.903 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 04 January 2025 11:35:05 -0500 (0:00:00.034) 0:02:01.937 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 04 January 2025 11:35:05 -0500 (0:00:00.042) 
0:02:01.980 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736008187.927243, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736008167.5443802, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3978348631", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 04 January 2025 11:35:06 -0500 (0:00:00.356) 0:02:02.336 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 04 January 2025 11:35:06 -0500 (0:00:00.030) 0:02:02.366 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 04 January 2025 11:35:06 -0500 (0:00:00.030) 0:02:02.397 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 04 January 2025 11:35:06 -0500 (0:00:00.030) 0:02:02.428 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 04 January 2025 11:35:06 -0500 (0:00:00.031) 0:02:02.460 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 04 January 2025 11:35:06 -0500 (0:00:00.028) 0:02:02.489 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : 
Fail if user not in subuid file] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 04 January 2025 11:35:06 -0500 (0:00:00.030) 0:02:02.519 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 04 January 2025 11:35:06 -0500 (0:00:00.028) 0:02:02.548 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 04 January 2025 11:35:06 -0500 (0:00:00.029) 0:02:02.578 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Saturday 04 January 2025 11:35:06 -0500 (0:00:00.038) 0:02:02.616 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 04 January 2025 11:35:06 -0500 (0:00:00.056) 0:02:02.673 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 04 January 2025 11:35:06 -0500 (0:00:00.031) 0:02:02.704 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Saturday 04 January 2025 11:35:06 -0500 (0:00:00.030) 0:02:02.735 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 04 January 2025 11:35:06 -0500 (0:00:00.094) 0:02:02.829 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": 
"Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 04 January 2025 11:35:06 -0500 (0:00:00.032) 0:02:02.862 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Saturday 04 January 2025 11:35:06 -0500 (0:00:00.031) 0:02:02.893 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Saturday 04 January 2025 11:35:06 -0500 (0:00:00.056) 0:02:02.950 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Saturday 04 January 2025 11:35:06 -0500 (0:00:00.031) 0:02:02.981 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Saturday 04 January 2025 11:35:06 -0500 (0:00:00.030) 0:02:03.011 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Saturday 04 January 2025 11:35:06 -0500 (0:00:00.060) 0:02:03.072 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Saturday 04 January 2025 11:35:06 -0500 (0:00:00.030) 0:02:03.103 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Saturday 04 January 2025 11:35:06 -0500 (0:00:00.031) 0:02:03.134 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: 
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Saturday 04 January 2025 11:35:06 -0500 (0:00:00.030) 0:02:03.164 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Saturday 04 January 2025 11:35:06 -0500 (0:00:00.032) 0:02:03.196 ****** included: fedora.linux_system_roles.firewall for managed-node2 TASK [fedora.linux_system_roles.firewall : Setup firewalld] ******************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2 Saturday 04 January 2025 11:35:07 -0500 (0:00:00.107) 0:02:03.304 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node2 TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2 Saturday 04 January 2025 11:35:07 -0500 (0:00:00.055) 0:02:03.359 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if system is ostree] ********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10 Saturday 04 January 2025 11:35:07 -0500 (0:00:00.037) 0:02:03.397 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15 Saturday 04 January 2025 11:35:07 -0500 (0:00:00.029) 0:02:03.426 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22 Saturday 04 January 2025 11:35:07 -0500 (0:00:00.070) 0:02:03.496 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27 Saturday 04 January 2025 11:35:07 -0500 (0:00:00.031) 0:02:03.527 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Install firewalld] ****************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31 Saturday 04 January 2025 11:35:07 -0500 (0:00:00.030) 0:02:03.558 ****** ok: 
[managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: firewalld TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:43 Saturday 04 January 2025 11:35:08 -0500 (0:00:00.687) 0:02:04.245 ****** skipping: [managed-node2] => { "false_condition": "__firewall_is_transactional | d(false)" } TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:48 Saturday 04 January 2025 11:35:08 -0500 (0:00:00.032) 0:02:04.278 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:53 Saturday 04 January 2025 11:35:08 -0500 (0:00:00.037) 0:02:04.315 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Collect service facts] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5 Saturday 04 January 2025 11:35:08 -0500 (0:00:00.031) 0:02:04.347 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:9 Saturday 04 January 2025 11:35:08 -0500 (0:00:00.029) 0:02:04.377 ****** skipping: [managed-node2] => (item=nftables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "nftables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=iptables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "iptables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=ufw) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "ufw", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] *********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22 Saturday 04 January 2025 11:35:08 -0500 (0:00:00.037) 0:02:04.414 ****** ok: [managed-node2] => { "changed": false, "name": "firewalld", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2025-01-04 11:29:53 EST", "ActiveEnterTimestampMonotonic": "332301083", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "basic.target dbus.socket sysinit.target system.slice dbus-broker.service polkit.service", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": 
"Sat 2025-01-04 11:29:52 EST", "AssertTimestampMonotonic": "332047601", "Before": "multi-user.target network-pre.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "599889000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-01-04 11:29:52 EST", "ConditionTimestampMonotonic": "332047598", "ConfigurationDirectoryMode": "0755", "Conflicts": "ebtables.service ip6tables.service ipset.service iptables.service shutdown.target", "ControlGroup": "/system.slice/firewalld.service", "ControlGroupId": "4677", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveCPUs": "0-1", "EffectiveMemoryHigh": "3698245632", "EffectiveMemoryMax": "3698245632", "EffectiveMemoryNodes": "0", "EffectiveTasksMax": "22349", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Sat 2025-01-04 11:29:52 EST", "ExecMainHandoffTimestampMonotonic": "332075596", "ExecMainPID": "11162", "ExecMainStartTimestamp": "Sat 2025-01-04 11:29:52 EST", "ExecMainStartTimestampMonotonic": "332050215", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", 
"FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-01-04 11:29:52 EST", "InactiveExitTimestampMonotonic": "332050697", "InvocationID": "c67f28ddb09e45a3b56b475ce0c9fcda", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "11162", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "2506870784", "MemoryCurrent": "35319808", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "35598336", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectHome": "yes", 
"ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket sysinit.target system.slice", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-01-04 11:34:24 EST", "StateChangeTimestampMonotonic": "603547255", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28 Saturday 04 January 2025 11:35:08 -0500 (0:00:00.516) 0:02:04.930 ****** ok: [managed-node2] => { "changed": false, "enabled": true, "name": "firewalld", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2025-01-04 11:29:53 EST", "ActiveEnterTimestampMonotonic": "332301083", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "basic.target dbus.socket sysinit.target system.slice dbus-broker.service polkit.service", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 
2025-01-04 11:29:52 EST", "AssertTimestampMonotonic": "332047601", "Before": "multi-user.target network-pre.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "599889000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-01-04 11:29:52 EST", "ConditionTimestampMonotonic": "332047598", "ConfigurationDirectoryMode": "0755", "Conflicts": "ebtables.service ip6tables.service ipset.service iptables.service shutdown.target", "ControlGroup": "/system.slice/firewalld.service", "ControlGroupId": "4677", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveCPUs": "0-1", "EffectiveMemoryHigh": "3698245632", "EffectiveMemoryMax": "3698245632", "EffectiveMemoryNodes": "0", "EffectiveTasksMax": "22349", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Sat 2025-01-04 11:29:52 EST", "ExecMainHandoffTimestampMonotonic": "332075596", "ExecMainPID": "11162", "ExecMainStartTimestamp": "Sat 2025-01-04 11:29:52 EST", "ExecMainStartTimestampMonotonic": "332050215", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", 
"FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-01-04 11:29:52 EST", "InactiveExitTimestampMonotonic": "332050697", "InvocationID": "c67f28ddb09e45a3b56b475ce0c9fcda", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "11162", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "2509987840", "MemoryCurrent": "35319808", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "35598336", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectHome": "yes", 
"ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket sysinit.target system.slice", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-01-04 11:34:24 EST", "StateChangeTimestampMonotonic": "603547255", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:34 Saturday 04 January 2025 11:35:09 -0500 (0:00:00.518) 0:02:05.449 ****** ok: [managed-node2] => { "ansible_facts": { "__firewall_previous_replaced": false, "__firewall_python_cmd": "/usr/bin/python3.12", "__firewall_report_changed": true }, "changed": false } TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:43 Saturday 04 January 2025 11:35:09 -0500 (0:00:00.044) 0:02:05.494 ****** skipping: [managed-node2] => { "changed": false, 
"false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:55 Saturday 04 January 2025 11:35:09 -0500 (0:00:00.046) 0:02:05.540 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Configure firewall] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71 Saturday 04 January 2025 11:35:09 -0500 (0:00:00.048) 0:02:05.589 ****** ok: [managed-node2] => (item={'port': '8000/tcp', 'state': 'enabled'}) => { "__firewall_changed": false, "ansible_loop_var": "item", "changed": false, "item": { "port": "8000/tcp", "state": "enabled" } } ok: [managed-node2] => (item={'port': '9000/tcp', 'state': 'enabled'}) => { "__firewall_changed": false, "ansible_loop_var": "item", "changed": false, "item": { "port": "9000/tcp", "state": "enabled" } } TASK [fedora.linux_system_roles.firewall : Gather firewall config information] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:120 Saturday 04 January 2025 11:35:10 -0500 (0:00:00.964) 0:02:06.553 ****** skipping: [managed-node2] => (item={'port': '8000/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall | length == 1", "item": { "port": "8000/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item={'port': '9000/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall | length == 1", "item": { "port": "9000/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130 Saturday 04 January 2025 11:35:10 -0500 (0:00:00.048) 0:02:06.601 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall | length == 1", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:139 Saturday 04 January 2025 11:35:10 -0500 (0:00:00.035) 0:02:06.637 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:144 Saturday 04 January 2025 11:35:10 -0500 (0:00:00.032) 0:02:06.669 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] *** task path: 
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:153 Saturday 04 January 2025 11:35:10 -0500 (0:00:00.032) 0:02:06.702 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Calculate what has changed] ********* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:163 Saturday 04 January 2025 11:35:10 -0500 (0:00:00.030) 0:02:06.732 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Show diffs] ************************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:169 Saturday 04 January 2025 11:35:10 -0500 (0:00:00.029) 0:02:06.762 ****** skipping: [managed-node2] => { "false_condition": "__firewall_previous_replaced | bool" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Saturday 04 January 2025 11:35:10 -0500 (0:00:00.083) 0:02:06.845 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Saturday 04 January 2025 11:35:10 -0500 (0:00:00.033) 0:02:06.878 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Saturday 04 January 2025 11:35:10 -0500 (0:00:00.033) 0:02:06.911 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Saturday 04 January 2025 11:35:10 -0500 (0:00:00.026) 0:02:06.938 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Saturday 04 January 2025 11:35:10 -0500 (0:00:00.030) 0:02:06.968 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set 
variables part 1] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 04 January 2025 11:35:10 -0500 (0:00:00.168) 0:02:07.137 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 04 January 2025 11:35:10 -0500 (0:00:00.047) 0:02:07.184 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:13 Saturday 04 January 2025 11:35:11 -0500 (0:00:00.061) 0:02:07.245 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 04 January 2025 11:35:11 -0500 (0:00:00.062) 0:02:07.308 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 04 January 2025 11:35:11 -0500 (0:00:00.040) 0:02:07.349 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 04 January 2025 11:35:11 -0500 (0:00:00.030) 0:02:07.379 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:18 Saturday 04 January 2025 11:35:11 -0500 (0:00:00.030) 0:02:07.410 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:34 Saturday 04 January 2025 11:35:11 -0500 (0:00:00.029) 0:02:07.439 ****** changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 04 January 2025 11:35:11 -0500 (0:00:00.441) 0:02:07.881 ****** ok: [managed-node2] => { "ansible_facts": { 
"__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 04 January 2025 11:35:11 -0500 (0:00:00.058) 0:02:07.939 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:13 Saturday 04 January 2025 11:35:11 -0500 (0:00:00.064) 0:02:08.004 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 04 January 2025 11:35:11 -0500 (0:00:00.097) 0:02:08.101 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 04 January 2025 11:35:11 -0500 (0:00:00.063) 0:02:08.165 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 04 January 2025 11:35:12 -0500 (0:00:00.085) 0:02:08.250 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:18 Saturday 04 January 2025 11:35:12 -0500 (0:00:00.037) 0:02:08.287 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:34 Saturday 04 January 2025 11:35:12 -0500 (0:00:00.034) 0:02:08.322 ****** changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 04 January 2025 11:35:12 -0500 (0:00:00.419) 0:02:08.742 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 04 January 2025 11:35:12 
-0500 (0:00:00.036) 0:02:08.779 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:13 Saturday 04 January 2025 11:35:12 -0500 (0:00:00.037) 0:02:08.817 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 04 January 2025 11:35:12 -0500 (0:00:00.052) 0:02:08.870 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 04 January 2025 11:35:12 -0500 (0:00:00.049) 0:02:08.920 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 04 January 2025 11:35:12 -0500 (0:00:00.036) 0:02:08.956 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:18 Saturday 04 January 2025 11:35:12 -0500 (0:00:00.032) 0:02:08.989 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:34 Saturday 04 January 2025 11:35:12 -0500 (0:00:00.032) 0:02:09.021 ****** changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182 Saturday 04 January 2025 11:35:13 -0500 (0:00:00.425) 0:02:09.446 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189 Saturday 04 January 2025 11:35:13 -0500 (0:00:00.030) 0:02:09.477 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: 
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 04 January 2025 11:35:13 -0500 (0:00:00.161) 0:02:09.639 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "quadlet-demo.kube", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Unit]\nRequires=quadlet-demo-mysql.service\nAfter=quadlet-demo-mysql.service\n\n[Kube]\n# Point to the yaml file in the same directory\nYaml=quadlet-demo.yml\n# Use the quadlet-demo network\nNetwork=quadlet-demo.network\n# Publish the envoy proxy data port\nPublishPort=8000:8080\n# Publish the envoy proxy admin port\nPublishPort=9000:9901\n# Use the envoy proxy config map in the same directory\nConfigMap=envoy-proxy-configmap.yml", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 04 January 2025 11:35:13 -0500 (0:00:00.043) 0:02:09.682 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 04 January 2025 11:35:13 -0500 (0:00:00.037) 0:02:09.720 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 04 January 2025 11:35:13 -0500 (0:00:00.031) 0:02:09.751 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo", "__podman_quadlet_type": "kube", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 04 January 2025 11:35:13 -0500 (0:00:00.047) 0:02:09.799 ****** included: 
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 04 January 2025 11:35:13 -0500 (0:00:00.149) 0:02:09.948 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 04 January 2025 11:35:13 -0500 (0:00:00.057) 0:02:10.006 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 04 January 2025 11:35:13 -0500 (0:00:00.053) 0:02:10.060 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 04 January 2025 11:35:13 -0500 (0:00:00.087) 0:02:10.148 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736008187.927243, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736008167.5443802, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3978348631", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 04 January 2025 11:35:14 -0500 (0:00:00.444) 0:02:10.593 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 04 January 2025 11:35:14 -0500 (0:00:00.034) 0:02:10.627 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: 
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 04 January 2025 11:35:14 -0500 (0:00:00.035) 0:02:10.663 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 04 January 2025 11:35:14 -0500 (0:00:00.039) 0:02:10.703 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 04 January 2025 11:35:14 -0500 (0:00:00.032) 0:02:10.735 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 04 January 2025 11:35:14 -0500 (0:00:00.029) 0:02:10.765 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 04 January 2025 11:35:14 -0500 (0:00:00.028) 0:02:10.793 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 04 January 2025 11:35:14 -0500 (0:00:00.029) 0:02:10.823 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 04 January 2025 11:35:14 -0500 (0:00:00.031) 0:02:10.854 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": [ "quadlet-demo.yml" ], "__podman_service_name": "quadlet-demo.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 04 January 2025 11:35:14 -0500 (0:00:00.062) 0:02:10.916 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK 
[fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 04 January 2025 11:35:14 -0500 (0:00:00.037) 0:02:10.953 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 04 January 2025 11:35:14 -0500 (0:00:00.035) 0:02:10.989 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.kube", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 04 January 2025 11:35:14 -0500 (0:00:00.076) 0:02:11.065 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 04 January 2025 11:35:14 -0500 (0:00:00.036) 0:02:11.101 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 04 January 2025 11:35:14 -0500 (0:00:00.070) 0:02:11.172 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 04 January 2025 11:35:15 -0500 (0:00:00.073) 0:02:11.246 ****** changed: [managed-node2] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-demo.service", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2025-01-04 11:34:25 EST", "ActiveEnterTimestampMonotonic": "604659255", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "basic.target network-online.target -.mount sysinit.target systemd-journald.socket system.slice quadlet-demo-network.service quadlet-demo-mysql.service", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-01-04 11:34:24 EST", "AssertTimestampMonotonic": "604153246", "Before": "shutdown.target multi-user.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "237486000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", 
"CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-01-04 11:34:24 EST", "ConditionTimestampMonotonic": "604153243", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/quadlet-demo.service", "ControlGroupId": "11607", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-demo.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveCPUs": "0-1", "EffectiveMemoryHigh": "3698245632", "EffectiveMemoryMax": "3698245632", "EffectiveMemoryNodes": "0", "EffectiveTasksMax": "22349", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "41819", "ExecMainStartTimestamp": "Sat 2025-01-04 11:34:25 EST", "ExecMainStartTimestampMonotonic": "604659211", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[Sat 2025-01-04 11:34:24 EST] ; stop_time=[n/a] ; pid=41809 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[Sat 2025-01-04 11:34:24 EST] ; stop_time=[n/a] ; pid=41809 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", 
"IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-01-04 11:34:24 EST", "InactiveExitTimestampMonotonic": "604155150", "InvocationID": "e98b1f5516ce46a8b01f773c10d8586e", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "41819", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "2495041536", "MemoryCurrent": "2961408", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "25796608", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", 
"RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "quadlet-demo-mysql.service system.slice sysinit.target -.mount quadlet-demo-network.service", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo.kube", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-01-04 11:34:25 EST", "StateChangeTimestampMonotonic": "604659255", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "4", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 04 January 2025 11:35:16 -0500 (0:00:01.273) 0:02:12.519 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736008463.5525014, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7", "ctime": 1736008463.5585012, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 524288815, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1736008463.2915025, "nlink": 1, 
"path": "/etc/containers/systemd/quadlet-demo.kube", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 456, "uid": 0, "version": "2478286342", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 04 January 2025 11:35:16 -0500 (0:00:00.391) 0:02:12.910 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 04 January 2025 11:35:16 -0500 (0:00:00.063) 0:02:12.974 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 04 January 2025 11:35:17 -0500 (0:00:00.358) 0:02:13.332 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 04 January 2025 11:35:17 -0500 (0:00:00.060) 0:02:13.392 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 04 January 2025 11:35:17 -0500 (0:00:00.031) 0:02:13.423 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 04 January 2025 11:35:17 -0500 (0:00:00.038) 0:02:13.462 ****** changed: [managed-node2] => { "changed": true, "path": "/etc/containers/systemd/quadlet-demo.kube", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 04 January 2025 11:35:17 -0500 (0:00:00.385) 0:02:13.847 ****** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 04 January 2025 11:35:18 -0500 (0:00:00.878) 0:02:14.725 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": 
false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 04 January 2025 11:35:18 -0500 (0:00:00.076) 0:02:14.802 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 04 January 2025 11:35:18 -0500 (0:00:00.083) 0:02:14.886 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 04 January 2025 11:35:18 -0500 (0:00:00.067) 0:02:14.953 ****** changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.698376", "end": "2025-01-04 11:35:19.743700", "rc": 0, "start": "2025-01-04 11:35:19.045324" } STDOUT: fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 04 January 2025 11:35:19 -0500 (0:00:01.098) 0:02:16.051 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 04 January 2025 11:35:19 -0500 (0:00:00.068) 0:02:16.120 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 04 January 2025 11:35:19 -0500 (0:00:00.029) 0:02:16.149 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 04 January 2025 11:35:19 -0500 (0:00:00.029) 0:02:16.178 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 04 January 2025 11:35:19 -0500 (0:00:00.027) 0:02:16.206 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.037086", "end": "2025-01-04 11:35:20.321315", 
"rc": 0, "start": "2025-01-04 11:35:20.284229" } STDOUT: localhost/podman-pause 5.3.1-1733097600 c95b7a4fdf05 5 minutes ago 701 kB quay.io/linux-system-roles/mysql 5.6 dd3b2a5dcb48 3 years ago 308 MB quay.io/libpod/testimage 20210610 9f9ec7f2fdef 3 years ago 7.99 MB TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 04 January 2025 11:35:20 -0500 (0:00:00.436) 0:02:16.643 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.028501", "end": "2025-01-04 11:35:20.797577", "rc": 0, "start": "2025-01-04 11:35:20.769076" } STDOUT: local systemd-quadlet-demo-mysql local wp-pv-claim local envoy-proxy-config local envoy-certificates TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 04 January 2025 11:35:20 -0500 (0:00:00.507) 0:02:17.150 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.036353", "end": "2025-01-04 11:35:21.314424", "rc": 0, "start": "2025-01-04 11:35:21.278071" } STDOUT: 23724cd2b98c localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes c8c001b59877-service dbc2b9607a4c localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes 0.0.0.0:15002->80/tcp ecaac7b287a8-infra f27f043c4df7 quay.io/libpod/testimage:20210610 2 minutes ago Up 2 minutes 0.0.0.0:15002->80/tcp httpd2-httpd2 ea65b4c142a1 localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes 8c5a350fe43d-service 196161f20f83 localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes 0.0.0.0:15003->80/tcp a93b7ede24ca-infra cba46159e385 quay.io/libpod/testimage:20210610 2 minutes ago Up 2 minutes 0.0.0.0:15003->80/tcp httpd3-httpd3 1ce48534b65f quay.io/linux-system-roles/mysql:5.6 mysqld About a minute ago Up About a minute (healthy) 3306/tcp quadlet-demo-mysql TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 04 January 2025 11:35:21 -0500 (0:00:00.523) 0:02:17.674 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.026048", "end": "2025-01-04 11:35:21.790583", "rc": 0, "start": "2025-01-04 11:35:21.764535" } STDOUT: podman podman-default-kube-network systemd-quadlet-demo TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 04 January 2025 11:35:21 -0500 (0:00:00.416) 0:02:18.090 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 04 January 2025 11:35:22 -0500 (0:00:00.404) 0:02:18.494 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": 
false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 04 January 2025 11:35:22 -0500 (0:00:00.412) 0:02:18.907 ****** ok: [managed-node2] => { "ansible_facts": { "services": { "1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e-1404942fa2db9863.service": { "name": "1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e-1404942fa2db9863.service", "source": "systemd", "state": "stopped", "status": "transient" }, "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": 
"dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, 
"firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": 
"unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "source": "systemd", "state": "running", "status": "active" }, 
"podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-demo-mysql-volume.service": { "name": "quadlet-demo-mysql-volume.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-demo-mysql.service": { "name": "quadlet-demo-mysql.service", "source": "systemd", "state": "running", "status": "generated" }, "quadlet-demo-network.service": { "name": "quadlet-demo-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": 
"running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", 
"state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": 
"systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { 
"name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 04 January 2025 11:35:24 -0500 (0:00:02.162) 0:02:21.069 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 04 January 2025 11:35:24 -0500 (0:00:00.059) 0:02:21.129 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "---\napiVersion: v1\nkind: PersistentVolumeClaim\nmetadata:\n name: wp-pv-claim\n labels:\n app: wordpress\nspec:\n accessModes:\n - ReadWriteOnce\n resources:\n requests:\n storage: 20Gi\n---\napiVersion: v1\nkind: Pod\nmetadata:\n name: quadlet-demo\nspec:\n containers:\n - name: wordpress\n image: quay.io/linux-system-roles/wordpress:4.8-apache\n env:\n - name: WORDPRESS_DB_HOST\n value: quadlet-demo-mysql\n - name: WORDPRESS_DB_PASSWORD\n valueFrom:\n secretKeyRef:\n name: mysql-root-password-kube\n key: password\n volumeMounts:\n - name: wordpress-persistent-storage\n mountPath: /var/www/html\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n - name: envoy\n image: quay.io/linux-system-roles/envoyproxy:v1.25.0\n volumeMounts:\n - name: config-volume\n mountPath: /etc/envoy\n - name: certificates\n mountPath: /etc/envoy-certificates\n env:\n - name: ENVOY_UID\n value: \"0\"\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n volumes:\n - name: config-volume\n configMap:\n name: envoy-proxy-config\n - name: 
certificates\n secret:\n secretName: envoy-certificates\n - name: wordpress-persistent-storage\n persistentVolumeClaim:\n claimName: wp-pv-claim\n - name: www # not used - for testing hostpath\n hostPath:\n path: /tmp/httpd3\n - name: create # not used - for testing hostpath\n hostPath:\n path: /tmp/httpd3-create\n", "__podman_quadlet_template_src": "quadlet-demo.yml.j2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 04 January 2025 11:35:25 -0500 (0:00:00.177) 0:02:21.307 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 04 January 2025 11:35:25 -0500 (0:00:00.074) 0:02:21.381 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_str", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 04 January 2025 11:35:25 -0500 (0:00:00.040) 0:02:21.421 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo", "__podman_quadlet_type": "yml", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 04 January 2025 11:35:25 -0500 (0:00:00.061) 0:02:21.483 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 04 January 2025 11:35:25 -0500 (0:00:00.067) 0:02:21.551 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 04 January 2025 11:35:25 -0500 (0:00:00.045) 0:02:21.596 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 04 January 2025 11:35:25 -0500 (0:00:00.038) 0:02:21.634 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** 
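The tasks that follow check whether the managed user has subordinate UID/GID ranges before deciding how to run the quadlet; in this run they are skipped because the unit is managed as root. For reference only, the same check can be reproduced by hand with the getsubids helper from shadow-utils; a minimal sketch, assuming a hypothetical rootless user named poduser (not a user from this test run):

    # confirm the helper the role stats below is installed
    test -x /usr/bin/getsubids && echo "getsubids is available"
    # list subordinate UID and GID ranges for the user (poduser is a placeholder)
    getsubids poduser
    getsubids -g poduser
    # fallback when getsubids is missing: the range files the later "Get subuid/subgid file" tasks would read
    grep '^poduser:' /etc/subuid /etc/subgid

Rootful Podman does not need these ranges, which is why the checks below are skipped with "Conditional result was False".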
task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 04 January 2025 11:35:25 -0500 (0:00:00.044) 0:02:21.679 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736008187.927243, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736008167.5443802, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3978348631", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 04 January 2025 11:35:25 -0500 (0:00:00.426) 0:02:22.105 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 04 January 2025 11:35:25 -0500 (0:00:00.123) 0:02:22.228 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 04 January 2025 11:35:26 -0500 (0:00:00.055) 0:02:22.283 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 04 January 2025 11:35:26 -0500 (0:00:00.054) 0:02:22.338 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 04 January 2025 11:35:26 -0500 (0:00:00.060) 0:02:22.399 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 04 January 2025 11:35:26 -0500 (0:00:00.053) 0:02:22.452 ****** skipping: [managed-node2] 
=> { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 04 January 2025 11:35:26 -0500 (0:00:00.058) 0:02:22.510 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 04 January 2025 11:35:26 -0500 (0:00:00.057) 0:02:22.568 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 04 January 2025 11:35:26 -0500 (0:00:00.054) 0:02:22.622 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 04 January 2025 11:35:26 -0500 (0:00:00.064) 0:02:22.687 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 04 January 2025 11:35:26 -0500 (0:00:00.045) 0:02:22.733 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 04 January 2025 11:35:26 -0500 (0:00:00.037) 0:02:22.770 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.yml", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 04 January 2025 11:35:26 -0500 (0:00:00.077) 0:02:22.847 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 04 January 2025 11:35:26 -0500 
(0:00:00.039) 0:02:22.887 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 04 January 2025 11:35:26 -0500 (0:00:00.080) 0:02:22.968 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 04 January 2025 11:35:26 -0500 (0:00:00.057) 0:02:23.025 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_service_name | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 04 January 2025 11:35:26 -0500 (0:00:00.042) 0:02:23.068 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736008444.3045967, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "998dccde0483b1654327a46ddd89cbaa47650370", "ctime": 1736008441.3286119, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 469762263, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1736008440.9396138, "nlink": 1, "path": "/etc/containers/systemd/quadlet-demo.yml", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1605, "uid": 0, "version": "4059453868", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 04 January 2025 11:35:27 -0500 (0:00:00.431) 0:02:23.499 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 04 January 2025 11:35:27 -0500 (0:00:00.070) 0:02:23.570 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 04 January 2025 11:35:27 -0500 (0:00:00.376) 0:02:23.947 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] 
************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 04 January 2025 11:35:27 -0500 (0:00:00.060) 0:02:24.007 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 04 January 2025 11:35:27 -0500 (0:00:00.075) 0:02:24.083 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 04 January 2025 11:35:27 -0500 (0:00:00.060) 0:02:24.143 ****** changed: [managed-node2] => { "changed": true, "path": "/etc/containers/systemd/quadlet-demo.yml", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 04 January 2025 11:35:28 -0500 (0:00:00.391) 0:02:24.535 ****** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 04 January 2025 11:35:29 -0500 (0:00:00.764) 0:02:25.300 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 04 January 2025 11:35:29 -0500 (0:00:00.058) 0:02:25.358 ****** changed: [managed-node2] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node2] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node2] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 04 January 2025 11:35:30 -0500 (0:00:01.252) 0:02:26.610 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 04 January 2025 11:35:30 -0500 (0:00:00.073) 0:02:26.684 ****** changed: [managed-node2] => 
{ "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.027946", "end": "2025-01-04 11:35:30.813018", "rc": 0, "start": "2025-01-04 11:35:30.785072" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 04 January 2025 11:35:30 -0500 (0:00:00.430) 0:02:27.115 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 04 January 2025 11:35:30 -0500 (0:00:00.060) 0:02:27.176 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 04 January 2025 11:35:30 -0500 (0:00:00.033) 0:02:27.209 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 04 January 2025 11:35:31 -0500 (0:00:00.041) 0:02:27.250 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 04 January 2025 11:35:31 -0500 (0:00:00.107) 0:02:27.358 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.030231", "end": "2025-01-04 11:35:31.457814", "rc": 0, "start": "2025-01-04 11:35:31.427583" } STDOUT: localhost/podman-pause 5.3.1-1733097600 c95b7a4fdf05 5 minutes ago 701 kB quay.io/linux-system-roles/mysql 5.6 dd3b2a5dcb48 3 years ago 308 MB quay.io/libpod/testimage 20210610 9f9ec7f2fdef 3 years ago 7.99 MB TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 04 January 2025 11:35:31 -0500 (0:00:00.407) 0:02:27.766 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.027895", "end": "2025-01-04 11:35:31.859814", "rc": 0, "start": "2025-01-04 11:35:31.831919" } STDOUT: local systemd-quadlet-demo-mysql TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 04 January 2025 11:35:31 -0500 (0:00:00.416) 0:02:28.182 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.035399", "end": "2025-01-04 11:35:32.319196", "rc": 0, "start": "2025-01-04 11:35:32.283797" } 
STDOUT: 23724cd2b98c localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes c8c001b59877-service dbc2b9607a4c localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes 0.0.0.0:15002->80/tcp ecaac7b287a8-infra f27f043c4df7 quay.io/libpod/testimage:20210610 2 minutes ago Up 2 minutes 0.0.0.0:15002->80/tcp httpd2-httpd2 ea65b4c142a1 localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes 8c5a350fe43d-service 196161f20f83 localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes 0.0.0.0:15003->80/tcp a93b7ede24ca-infra cba46159e385 quay.io/libpod/testimage:20210610 2 minutes ago Up 2 minutes 0.0.0.0:15003->80/tcp httpd3-httpd3 1ce48534b65f quay.io/linux-system-roles/mysql:5.6 mysqld About a minute ago Up About a minute (healthy) 3306/tcp quadlet-demo-mysql TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 04 January 2025 11:35:32 -0500 (0:00:00.457) 0:02:28.640 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.026643", "end": "2025-01-04 11:35:32.769568", "rc": 0, "start": "2025-01-04 11:35:32.742925" } STDOUT: podman podman-default-kube-network systemd-quadlet-demo TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 04 January 2025 11:35:32 -0500 (0:00:00.430) 0:02:29.070 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 04 January 2025 11:35:33 -0500 (0:00:00.441) 0:02:29.512 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 04 January 2025 11:35:33 -0500 (0:00:00.442) 0:02:29.954 ****** ok: [managed-node2] => { "ansible_facts": { "services": { "1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e-1404942fa2db9863.service": { "name": "1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e-1404942fa2db9863.service", "source": "systemd", "state": "stopped", "status": "failed" }, "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": 
"display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": 
"initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-demo-mysql-volume.service": { "name": "quadlet-demo-mysql-volume.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-demo-mysql.service": { "name": "quadlet-demo-mysql.service", "source": "systemd", "state": "running", "status": "generated" }, "quadlet-demo-network.service": { "name": "quadlet-demo-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", 
"state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": 
"systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": 
"static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", 
"source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { 
"name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": 
"systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 04 January 2025 11:35:35 -0500 (0:00:01.947) 0:02:31.902 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 04 January 2025 11:35:35 -0500 (0:00:00.033) 0:02:31.935 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "envoy-proxy-configmap.yml", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "---\napiVersion: v1\nkind: ConfigMap\nmetadata:\n name: envoy-proxy-config\ndata:\n envoy.yaml: |\n admin:\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 9901\n\n static_resources:\n listeners:\n - name: listener_0\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 8080\n filter_chains:\n - filters:\n - name: envoy.filters.network.http_connection_manager\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager\n stat_prefix: ingress_http\n codec_type: AUTO\n route_config:\n name: local_route\n virtual_hosts:\n - name: local_service\n domains: [\"*\"]\n routes:\n - match:\n prefix: \"/\"\n route:\n cluster: backend\n http_filters:\n - name: envoy.filters.http.router\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.filters.http.router.v3.Router\n transport_socket:\n name: envoy.transport_sockets.tls\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.DownstreamTlsContext\n common_tls_context:\n tls_certificates:\n - certificate_chain:\n filename: /etc/envoy-certificates/certificate.pem\n private_key:\n filename: /etc/envoy-certificates/certificate.key\n clusters:\n - name: backend\n connect_timeout: 5s\n type: STATIC\n dns_refresh_rate: 1800s\n lb_policy: ROUND_ROBIN\n load_assignment:\n cluster_name: backend\n endpoints:\n - lb_endpoints:\n - endpoint:\n address:\n socket_address:\n address: 127.0.0.1\n port_value: 80", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 04 January 2025 11:35:35 -0500 (0:00:00.047) 0:02:31.982 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 04 January 2025 11:35:35 -0500 (0:00:00.045) 0:02:32.028 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: 
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 04 January 2025 11:35:35 -0500 (0:00:00.047) 0:02:32.075 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "envoy-proxy-configmap", "__podman_quadlet_type": "yml", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 04 January 2025 11:35:35 -0500 (0:00:00.070) 0:02:32.146 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 04 January 2025 11:35:36 -0500 (0:00:00.107) 0:02:32.253 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 04 January 2025 11:35:36 -0500 (0:00:00.137) 0:02:32.390 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 04 January 2025 11:35:36 -0500 (0:00:00.064) 0:02:32.455 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 04 January 2025 11:35:36 -0500 (0:00:00.095) 0:02:32.551 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736008187.927243, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736008167.5443802, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3978348631", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 04 January 2025 11:35:36 -0500 (0:00:00.437) 0:02:32.988 ****** skipping: [managed-node2] => { "changed": false, 
"false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 04 January 2025 11:35:36 -0500 (0:00:00.063) 0:02:33.052 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 04 January 2025 11:35:36 -0500 (0:00:00.053) 0:02:33.105 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 04 January 2025 11:35:36 -0500 (0:00:00.072) 0:02:33.178 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 04 January 2025 11:35:37 -0500 (0:00:00.060) 0:02:33.238 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 04 January 2025 11:35:37 -0500 (0:00:00.060) 0:02:33.298 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 04 January 2025 11:35:37 -0500 (0:00:00.056) 0:02:33.355 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 04 January 2025 11:35:37 -0500 (0:00:00.058) 0:02:33.414 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 04 January 2025 11:35:37 -0500 (0:00:00.059) 0:02:33.473 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], 
"__podman_kube_yamls_raw": "", "__podman_service_name": "", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 04 January 2025 11:35:37 -0500 (0:00:00.095) 0:02:33.568 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 04 January 2025 11:35:37 -0500 (0:00:00.040) 0:02:33.608 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 04 January 2025 11:35:37 -0500 (0:00:00.038) 0:02:33.647 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/envoy-proxy-configmap.yml", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 04 January 2025 11:35:37 -0500 (0:00:00.119) 0:02:33.766 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 04 January 2025 11:35:37 -0500 (0:00:00.053) 0:02:33.820 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 04 January 2025 11:35:37 -0500 (0:00:00.174) 0:02:33.995 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 04 January 2025 11:35:37 -0500 (0:00:00.053) 0:02:34.048 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_service_name | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 04 January 2025 11:35:37 -0500 (0:00:00.044) 0:02:34.092 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736008465.0384948, 
"attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "d681c7d56f912150d041873e880818b22a90c188", "ctime": 1736008437.2036326, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 427819226, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1736008436.9026341, "nlink": 1, "path": "/etc/containers/systemd/envoy-proxy-configmap.yml", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 2102, "uid": 0, "version": "889513616", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 04 January 2025 11:35:38 -0500 (0:00:00.399) 0:02:34.492 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 04 January 2025 11:35:38 -0500 (0:00:00.071) 0:02:34.564 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 04 January 2025 11:35:38 -0500 (0:00:00.394) 0:02:34.958 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 04 January 2025 11:35:38 -0500 (0:00:00.064) 0:02:35.022 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 04 January 2025 11:35:38 -0500 (0:00:00.073) 0:02:35.095 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 04 January 2025 11:35:38 -0500 (0:00:00.059) 0:02:35.155 ****** changed: [managed-node2] => { "changed": true, "path": "/etc/containers/systemd/envoy-proxy-configmap.yml", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 04 January 2025 11:35:39 -0500 
(0:00:00.430) 0:02:35.585 ****** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 04 January 2025 11:35:40 -0500 (0:00:00.796) 0:02:36.382 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 04 January 2025 11:35:40 -0500 (0:00:00.040) 0:02:36.423 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 04 January 2025 11:35:40 -0500 (0:00:00.057) 0:02:36.480 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 04 January 2025 11:35:40 -0500 (0:00:00.038) 0:02:36.519 ****** changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.030174", "end": "2025-01-04 11:35:40.609435", "rc": 0, "start": "2025-01-04 11:35:40.579261" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 04 January 2025 11:35:40 -0500 (0:00:00.436) 0:02:36.955 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 04 January 2025 11:35:40 -0500 (0:00:00.061) 0:02:37.016 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 04 January 2025 11:35:40 -0500 (0:00:00.033) 0:02:37.050 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 04 January 2025 11:35:40 -0500 (0:00:00.038) 0:02:37.088 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 04 January 2025 11:35:40 -0500 (0:00:00.051) 0:02:37.139 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.031821", "end": "2025-01-04 11:35:41.238422", "rc": 0, "start": "2025-01-04 11:35:41.206601" } STDOUT: localhost/podman-pause 5.3.1-1733097600 c95b7a4fdf05 5 minutes ago 701 kB quay.io/linux-system-roles/mysql 5.6 dd3b2a5dcb48 3 years ago 308 MB quay.io/libpod/testimage 20210610 9f9ec7f2fdef 3 years ago 7.99 MB TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 04 January 2025 11:35:41 -0500 (0:00:00.420) 0:02:37.560 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.027950", "end": "2025-01-04 11:35:41.679214", "rc": 0, "start": "2025-01-04 11:35:41.651264" } STDOUT: local systemd-quadlet-demo-mysql TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 04 January 2025 11:35:41 -0500 (0:00:00.473) 0:02:38.034 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.037883", "end": "2025-01-04 11:35:42.193499", "rc": 0, "start": "2025-01-04 11:35:42.155616" } STDOUT: 23724cd2b98c localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes c8c001b59877-service dbc2b9607a4c localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp ecaac7b287a8-infra f27f043c4df7 quay.io/libpod/testimage:20210610 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp httpd2-httpd2 ea65b4c142a1 localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes 8c5a350fe43d-service 196161f20f83 localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes 0.0.0.0:15003->80/tcp a93b7ede24ca-infra cba46159e385 quay.io/libpod/testimage:20210610 2 minutes ago Up 2 minutes 0.0.0.0:15003->80/tcp httpd3-httpd3 1ce48534b65f quay.io/linux-system-roles/mysql:5.6 mysqld About a minute ago Up About a minute (healthy) 3306/tcp quadlet-demo-mysql TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 04 January 2025 11:35:42 -0500 (0:00:00.498) 0:02:38.533 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.029282", "end": "2025-01-04 11:35:42.658704", "rc": 0, "start": "2025-01-04 11:35:42.629422" } STDOUT: podman podman-default-kube-network systemd-quadlet-demo TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 04 January 2025 11:35:42 -0500 (0:00:00.434) 0:02:38.967 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] 
***** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 04 January 2025 11:35:43 -0500 (0:00:00.395) 0:02:39.363 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 04 January 2025 11:35:43 -0500 (0:00:00.426) 0:02:39.789 ****** ok: [managed-node2] => { "ansible_facts": { "services": { "1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e-1404942fa2db9863.service": { "name": "1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e-1404942fa2db9863.service", "source": "systemd", "state": "stopped", "status": "failed" }, "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": 
"unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": 
"stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": 
"man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": 
"inactive", "status": "disabled" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-demo-mysql-volume.service": { "name": "quadlet-demo-mysql-volume.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-demo-mysql.service": { "name": "quadlet-demo-mysql.service", "source": "systemd", "state": "running", "status": "generated" }, "quadlet-demo-network.service": { "name": "quadlet-demo-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": 
"inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", 
"status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", 
"source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": 
"systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { 
"name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 04 January 2025 11:35:45 -0500 (0:00:01.951) 0:02:41.741 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 04 January 2025 11:35:45 -0500 (0:00:00.046) 0:02:41.788 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Container]\nImage=quay.io/linux-system-roles/mysql:5.6\nContainerName=quadlet-demo-mysql\nVolume=quadlet-demo-mysql.volume:/var/lib/mysql\nVolume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z\nNetwork=quadlet-demo.network\nSecret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD\nHealthCmd=/bin/true\nHealthOnFailure=kill\n", "__podman_quadlet_template_src": "quadlet-demo-mysql.container.j2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 04 January 2025 11:35:45 -0500 (0:00:00.219) 0:02:42.007 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", 
"__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 04 January 2025 11:35:45 -0500 (0:00:00.055) 0:02:42.062 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_str", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 04 January 2025 11:35:45 -0500 (0:00:00.091) 0:02:42.154 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo-mysql", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 04 January 2025 11:35:45 -0500 (0:00:00.068) 0:02:42.222 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 04 January 2025 11:35:46 -0500 (0:00:00.091) 0:02:42.314 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 04 January 2025 11:35:46 -0500 (0:00:00.047) 0:02:42.361 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 04 January 2025 11:35:46 -0500 (0:00:00.059) 0:02:42.420 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 04 January 2025 11:35:46 -0500 (0:00:00.082) 0:02:42.503 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736008187.927243, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736008167.5443802, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 
1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3978348631", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 04 January 2025 11:35:46 -0500 (0:00:00.410) 0:02:42.914 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 04 January 2025 11:35:46 -0500 (0:00:00.076) 0:02:42.990 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 04 January 2025 11:35:46 -0500 (0:00:00.076) 0:02:43.067 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 04 January 2025 11:35:46 -0500 (0:00:00.098) 0:02:43.166 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 04 January 2025 11:35:47 -0500 (0:00:00.070) 0:02:43.236 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 04 January 2025 11:35:47 -0500 (0:00:00.052) 0:02:43.289 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 04 January 2025 11:35:47 -0500 (0:00:00.102) 0:02:43.391 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 04 January 2025 11:35:47 
-0500 (0:00:00.064) 0:02:43.456 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 04 January 2025 11:35:47 -0500 (0:00:00.064) 0:02:43.520 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-demo-mysql.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 04 January 2025 11:35:47 -0500 (0:00:00.160) 0:02:43.680 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 04 January 2025 11:35:47 -0500 (0:00:00.056) 0:02:43.737 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 04 January 2025 11:35:47 -0500 (0:00:00.072) 0:02:43.809 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.container", "__podman_volumes": [ "/tmp/quadlet_demo" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 04 January 2025 11:35:47 -0500 (0:00:00.122) 0:02:43.932 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 04 January 2025 11:35:47 -0500 (0:00:00.068) 0:02:44.000 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 04 January 2025 11:35:47 -0500 (0:00:00.092) 0:02:44.093 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* 
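Note: the __podman_quadlet_str fact set in "Set per-container variables part 0" above holds the rendered quadlet unit that this cleanup pass stops and removes. Expanded from that string exactly as logged (nothing added beyond it), the file installed at /etc/containers/systemd/quadlet-demo-mysql.container reads:

[Install]
WantedBy=default.target

[Container]
Image=quay.io/linux-system-roles/mysql:5.6
ContainerName=quadlet-demo-mysql
Volume=quadlet-demo-mysql.volume:/var/lib/mysql
Volume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z
Network=quadlet-demo.network
Secret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD
HealthCmd=/bin/true
HealthOnFailure=kill

Podman's quadlet generator translates this unit into the transient quadlet-demo-mysql.service (SourcePath /etc/containers/systemd/quadlet-demo-mysql.container, FragmentPath /run/systemd/generator/quadlet-demo-mysql.service), whose ExecStart/ExecStop properties appear in the task result that follows.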
task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 04 January 2025 11:35:47 -0500 (0:00:00.036) 0:02:44.129 ****** changed: [managed-node2] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-demo-mysql.service", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2025-01-04 11:33:54 EST", "ActiveEnterTimestampMonotonic": "573414056", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "sysinit.target network-online.target quadlet-demo-mysql-volume.service systemd-journald.socket basic.target tmp.mount quadlet-demo-network.service -.mount system.slice", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-01-04 11:33:53 EST", "AssertTimestampMonotonic": "573175459", "Before": "shutdown.target multi-user.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "2987652000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-01-04 11:33:53 EST", "ConditionTimestampMonotonic": "573175455", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/quadlet-demo-mysql.service", "ControlGroupId": "10613", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuset io memory pids", "Description": "quadlet-demo-mysql.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveCPUs": "0-1", "EffectiveMemoryHigh": "3698245632", "EffectiveMemoryMax": "3698245632", "EffectiveMemoryNodes": "0", "EffectiveTasksMax": "22349", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "38339", "ExecMainStartTimestamp": "Sat 2025-01-04 11:33:54 EST", "ExecMainStartTimestampMonotonic": "573413991", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret 
mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo-mysql.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo-mysql.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-01-04 11:33:53 EST", "InactiveExitTimestampMonotonic": "573184833", "InvocationID": "9aab529aeb474925aa9b0ee9123be472", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": 
"13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "38339", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "2611408896", "MemoryCurrent": "606380032", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "641740800", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo-mysql.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "continue", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice quadlet-demo-network.service sysinit.target -.mount quadlet-demo-mysql-volume.service", "RequiresMountsFor": "/tmp/quadlet_demo /run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.container", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", 
"StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-01-04 11:33:54 EST", "StateChangeTimestampMonotonic": "573414056", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo-mysql", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "23", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 04 January 2025 11:35:50 -0500 (0:00:02.547) 0:02:46.676 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736008432.5956557, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4", "ctime": 1736008432.6016557, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 272631104, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1736008432.344657, "nlink": 1, "path": "/etc/containers/systemd/quadlet-demo-mysql.container", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 363, "uid": 0, "version": "3657382814", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 04 January 2025 11:35:50 -0500 (0:00:00.465) 0:02:47.142 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 04 January 2025 11:35:51 -0500 (0:00:00.121) 0:02:47.264 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: 
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 04 January 2025 11:35:51 -0500 (0:00:00.370) 0:02:47.634 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 04 January 2025 11:35:51 -0500 (0:00:00.081) 0:02:47.716 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 04 January 2025 11:35:51 -0500 (0:00:00.097) 0:02:47.814 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 04 January 2025 11:35:51 -0500 (0:00:00.084) 0:02:47.898 ****** changed: [managed-node2] => { "changed": true, "path": "/etc/containers/systemd/quadlet-demo-mysql.container", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 04 January 2025 11:35:52 -0500 (0:00:00.413) 0:02:48.312 ****** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 04 January 2025 11:35:52 -0500 (0:00:00.864) 0:02:49.176 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 04 January 2025 11:35:53 -0500 (0:00:00.453) 0:02:49.630 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 04 January 2025 11:35:53 -0500 (0:00:00.054) 0:02:49.685 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 04 January 2025 11:35:53 -0500 (0:00:00.034) 0:02:49.719 ****** changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": 
"0:00:00.218638", "end": "2025-01-04 11:35:53.997776", "rc": 0, "start": "2025-01-04 11:35:53.779138" } STDOUT: dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 04 January 2025 11:35:54 -0500 (0:00:00.609) 0:02:50.329 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 04 January 2025 11:35:54 -0500 (0:00:00.110) 0:02:50.439 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 04 January 2025 11:35:54 -0500 (0:00:00.049) 0:02:50.489 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 04 January 2025 11:35:54 -0500 (0:00:00.036) 0:02:50.526 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 04 January 2025 11:35:54 -0500 (0:00:00.044) 0:02:50.570 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.029480", "end": "2025-01-04 11:35:54.665151", "rc": 0, "start": "2025-01-04 11:35:54.635671" } STDOUT: localhost/podman-pause 5.3.1-1733097600 c95b7a4fdf05 5 minutes ago 701 kB quay.io/libpod/testimage 20210610 9f9ec7f2fdef 3 years ago 7.99 MB TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 04 January 2025 11:35:54 -0500 (0:00:00.403) 0:02:50.974 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.027388", "end": "2025-01-04 11:35:55.065519", "rc": 0, "start": "2025-01-04 11:35:55.038131" } STDOUT: local systemd-quadlet-demo-mysql TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 04 January 2025 11:35:55 -0500 (0:00:00.413) 0:02:51.387 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.033480", "end": "2025-01-04 11:35:55.512544", "rc": 0, "start": "2025-01-04 11:35:55.479064" } STDOUT: 23724cd2b98c localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 
minutes c8c001b59877-service dbc2b9607a4c localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp ecaac7b287a8-infra f27f043c4df7 quay.io/libpod/testimage:20210610 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp httpd2-httpd2 ea65b4c142a1 localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes 8c5a350fe43d-service 196161f20f83 localhost/podman-pause:5.3.1-1733097600 2 minutes ago Up 2 minutes 0.0.0.0:15003->80/tcp a93b7ede24ca-infra cba46159e385 quay.io/libpod/testimage:20210610 2 minutes ago Up 2 minutes 0.0.0.0:15003->80/tcp httpd3-httpd3 TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 04 January 2025 11:35:55 -0500 (0:00:00.453) 0:02:51.841 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.026640", "end": "2025-01-04 11:35:55.963763", "rc": 0, "start": "2025-01-04 11:35:55.937123" } STDOUT: podman podman-default-kube-network systemd-quadlet-demo TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 04 January 2025 11:35:56 -0500 (0:00:00.458) 0:02:52.300 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 04 January 2025 11:35:56 -0500 (0:00:00.515) 0:02:52.815 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 04 January 2025 11:35:57 -0500 (0:00:00.468) 0:02:53.284 ****** ok: [managed-node2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "running", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": 
"systemd", "state": "unknown", "status": "static" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" 
}, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, 
"iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", 
"state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-demo-mysql-volume.service": { "name": "quadlet-demo-mysql-volume.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-demo-network.service": { "name": "quadlet-demo-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", 
"status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": 
"systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": 
"systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", 
"state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 04 January 2025 11:35:59 -0500 (0:00:02.191) 0:02:55.476 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: 
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 04 January 2025 11:35:59 -0500 (0:00:00.062) 0:02:55.538 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "quadlet-demo-mysql.volume", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Volume]", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 04 January 2025 11:35:59 -0500 (0:00:00.053) 0:02:55.592 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 04 January 2025 11:35:59 -0500 (0:00:00.051) 0:02:55.643 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 04 January 2025 11:35:59 -0500 (0:00:00.038) 0:02:55.682 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo-mysql", "__podman_quadlet_type": "volume", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 04 January 2025 11:35:59 -0500 (0:00:00.049) 0:02:55.732 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 04 January 2025 11:35:59 -0500 (0:00:00.090) 0:02:55.822 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 04 January 2025 11:35:59 -0500 (0:00:00.072) 0:02:55.894 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 04 January 2025 11:35:59 -0500 (0:00:00.065) 0:02:55.960 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if 
getsubids exists] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 04 January 2025 11:35:59 -0500 (0:00:00.078) 0:02:56.039 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736008187.927243, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736008167.5443802, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3978348631", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 04 January 2025 11:36:00 -0500 (0:00:00.416) 0:02:56.455 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 04 January 2025 11:36:00 -0500 (0:00:00.062) 0:02:56.518 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 04 January 2025 11:36:00 -0500 (0:00:00.062) 0:02:56.580 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 04 January 2025 11:36:00 -0500 (0:00:00.056) 0:02:56.637 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 04 January 2025 11:36:00 -0500 (0:00:00.058) 0:02:56.696 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 04 January 2025 11:36:00 -0500 (0:00:00.125) 0:02:56.821 
****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 04 January 2025 11:36:00 -0500 (0:00:00.040) 0:02:56.861 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 04 January 2025 11:36:00 -0500 (0:00:00.041) 0:02:56.903 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 04 January 2025 11:36:00 -0500 (0:00:00.043) 0:02:56.947 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-demo-mysql-volume.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 04 January 2025 11:36:00 -0500 (0:00:00.059) 0:02:57.007 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 04 January 2025 11:36:00 -0500 (0:00:00.036) 0:02:57.043 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 04 January 2025 11:36:00 -0500 (0:00:00.033) 0:02:57.077 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.volume", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 04 January 2025 11:36:00 -0500 (0:00:00.076) 0:02:57.153 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: 
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 04 January 2025 11:36:00 -0500 (0:00:00.040) 0:02:57.193 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 04 January 2025 11:36:01 -0500 (0:00:00.086) 0:02:57.280 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 04 January 2025 11:36:01 -0500 (0:00:00.032) 0:02:57.313 ****** changed: [managed-node2] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-demo-mysql-volume.service", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2025-01-04 11:33:42 EST", "ActiveEnterTimestampMonotonic": "561219382", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "basic.target network-online.target systemd-journald.socket -.mount sysinit.target system.slice", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-01-04 11:33:41 EST", "AssertTimestampMonotonic": "561173775", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-01-04 11:33:41 EST", "ConditionTimestampMonotonic": "561173771", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-demo-mysql-volume.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698245632", "EffectiveMemoryMax": "3698245632", "EffectiveTasksMax": "22349", "ExecMainCode": "1", "ExecMainExitTimestamp": "Sat 2025-01-04 11:33:42 EST", 
"ExecMainExitTimestampMonotonic": "561219202", "ExecMainHandoffTimestamp": "Sat 2025-01-04 11:33:41 EST", "ExecMainHandoffTimestampMonotonic": "561183702", "ExecMainPID": "37008", "ExecMainStartTimestamp": "Sat 2025-01-04 11:33:41 EST", "ExecMainStartTimestampMonotonic": "561174607", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo-mysql-volume.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo-mysql-volume.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-01-04 11:33:41 EST", "InactiveExitTimestampMonotonic": "561175091", "InvocationID": "59592b2fa86c4dad802508f271cb5cef", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3121344512", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": 
"200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo-mysql-volume.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "-.mount sysinit.target system.slice", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.volume", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-01-04 11:33:42 EST", "StateChangeTimestampMonotonic": "561219382", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "exited", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo-mysql-volume", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", 
"TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 04 January 2025 11:36:01 -0500 (0:00:00.795) 0:02:58.108 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736008420.4217172, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "585f8cbdf0ec73000f9227dcffbef71e9552ea4a", "ctime": 1736008420.427717, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 662700248, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1736008420.1697183, "nlink": 1, "path": "/etc/containers/systemd/quadlet-demo-mysql.volume", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 9, "uid": 0, "version": "81522673", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 04 January 2025 11:36:02 -0500 (0:00:00.373) 0:02:58.481 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 04 January 2025 11:36:02 -0500 (0:00:00.061) 0:02:58.543 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 04 January 2025 11:36:02 -0500 (0:00:00.407) 0:02:58.951 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 04 January 2025 11:36:02 -0500 (0:00:00.080) 0:02:59.031 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 04 January 2025 11:36:02 -0500 (0:00:00.039) 
0:02:59.072 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 04 January 2025 11:36:02 -0500 (0:00:00.039) 0:02:59.111 ****** changed: [managed-node2] => { "changed": true, "path": "/etc/containers/systemd/quadlet-demo-mysql.volume", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 04 January 2025 11:36:03 -0500 (0:00:00.389) 0:02:59.501 ****** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 04 January 2025 11:36:04 -0500 (0:00:00.739) 0:03:00.240 ****** changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 04 January 2025 11:36:04 -0500 (0:00:00.422) 0:03:00.663 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 04 January 2025 11:36:04 -0500 (0:00:00.046) 0:03:00.709 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 04 January 2025 11:36:04 -0500 (0:00:00.036) 0:03:00.746 ****** changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.029370", "end": "2025-01-04 11:36:04.837931", "rc": 0, "start": "2025-01-04 11:36:04.808561" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 04 January 2025 11:36:04 -0500 (0:00:00.411) 0:03:01.158 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 04 January 2025 11:36:05 -0500 (0:00:00.085) 0:03:01.243 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: 
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 04 January 2025 11:36:05 -0500 (0:00:00.056) 0:03:01.299 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 04 January 2025 11:36:05 -0500 (0:00:00.037) 0:03:01.337 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 04 January 2025 11:36:05 -0500 (0:00:00.034) 0:03:01.371 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.030915", "end": "2025-01-04 11:36:05.471475", "rc": 0, "start": "2025-01-04 11:36:05.440560" } STDOUT: localhost/podman-pause 5.3.1-1733097600 c95b7a4fdf05 5 minutes ago 701 kB quay.io/libpod/testimage 20210610 9f9ec7f2fdef 3 years ago 7.99 MB TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 04 January 2025 11:36:05 -0500 (0:00:00.409) 0:03:01.781 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.026958", "end": "2025-01-04 11:36:05.875218", "rc": 0, "start": "2025-01-04 11:36:05.848260" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 04 January 2025 11:36:05 -0500 (0:00:00.396) 0:03:02.178 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.033853", "end": "2025-01-04 11:36:06.278813", "rc": 0, "start": "2025-01-04 11:36:06.244960" } STDOUT: 23724cd2b98c localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes c8c001b59877-service dbc2b9607a4c localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp ecaac7b287a8-infra f27f043c4df7 quay.io/libpod/testimage:20210610 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp httpd2-httpd2 ea65b4c142a1 localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 8c5a350fe43d-service 196161f20f83 localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 0.0.0.0:15003->80/tcp a93b7ede24ca-infra cba46159e385 quay.io/libpod/testimage:20210610 3 minutes ago Up 3 minutes 0.0.0.0:15003->80/tcp httpd3-httpd3 TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 04 January 2025 11:36:06 -0500 (0:00:00.466) 0:03:02.645 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.028313", "end": "2025-01-04 11:36:06.735570", "rc": 0, "start": "2025-01-04 11:36:06.707257" } STDOUT: podman podman-default-kube-network systemd-quadlet-demo TASK 
[fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 04 January 2025 11:36:06 -0500 (0:00:00.397) 0:03:03.042 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 04 January 2025 11:36:07 -0500 (0:00:00.397) 0:03:03.439 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 04 January 2025 11:36:07 -0500 (0:00:00.418) 0:03:03.857 ****** ok: [managed-node2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", 
"status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", 
"state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": 
"systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, 
"podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-demo-network.service": { "name": "quadlet-demo-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, 
"serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, 
"systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": 
"inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", 
"state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", 
"state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 04 January 2025 11:36:09 -0500 (0:00:01.870) 0:03:05.727 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 04 January 2025 11:36:09 -0500 (0:00:00.033) 0:03:05.761 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "quadlet-demo.network", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Network]\nSubnet=192.168.30.0/24\nGateway=192.168.30.1\nLabel=app=wordpress", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 04 January 2025 11:36:09 -0500 (0:00:00.046) 0:03:05.808 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 04 January 2025 11:36:09 -0500 (0:00:00.044) 0:03:05.853 ****** skipping: [managed-node2] 
=> { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 04 January 2025 11:36:09 -0500 (0:00:00.037) 0:03:05.890 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo", "__podman_quadlet_type": "network", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 04 January 2025 11:36:09 -0500 (0:00:00.050) 0:03:05.941 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 04 January 2025 11:36:09 -0500 (0:00:00.066) 0:03:06.007 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 04 January 2025 11:36:09 -0500 (0:00:00.040) 0:03:06.048 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 04 January 2025 11:36:09 -0500 (0:00:00.041) 0:03:06.090 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:28 Saturday 04 January 2025 11:36:09 -0500 (0:00:00.047) 0:03:06.137 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736008187.927243, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736008167.5443802, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9125782, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "3978348631", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: 
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:39 Saturday 04 January 2025 11:36:10 -0500 (0:00:00.367) 0:03:06.505 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:44 Saturday 04 January 2025 11:36:10 -0500 (0:00:00.093) 0:03:06.598 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:49 Saturday 04 January 2025 11:36:10 -0500 (0:00:00.034) 0:03:06.633 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:62 Saturday 04 January 2025 11:36:10 -0500 (0:00:00.035) 0:03:06.668 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:67 Saturday 04 January 2025 11:36:10 -0500 (0:00:00.035) 0:03:06.704 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:72 Saturday 04 January 2025 11:36:10 -0500 (0:00:00.034) 0:03:06.739 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:82 Saturday 04 January 2025 11:36:10 -0500 (0:00:00.035) 0:03:06.774 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:89 Saturday 04 January 2025 11:36:10 -0500 (0:00:00.034) 0:03:06.809 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: 
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 04 January 2025 11:36:10 -0500 (0:00:00.034) 0:03:06.843 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-demo-network.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 04 January 2025 11:36:10 -0500 (0:00:00.086) 0:03:06.930 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 04 January 2025 11:36:10 -0500 (0:00:00.060) 0:03:06.990 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 04 January 2025 11:36:10 -0500 (0:00:00.054) 0:03:07.045 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.network", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 04 January 2025 11:36:10 -0500 (0:00:00.129) 0:03:07.175 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 04 January 2025 11:36:11 -0500 (0:00:00.070) 0:03:07.245 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 04 January 2025 11:36:11 -0500 (0:00:00.133) 0:03:07.379 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 04 January 2025 11:36:11 -0500 (0:00:00.058) 0:03:07.437 ****** changed: [managed-node2] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-demo-network.service", "state": "stopped", "status": { "AccessSELinuxContext": 
"system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2025-01-04 11:33:36 EST", "ActiveEnterTimestampMonotonic": "556073279", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "sysinit.target systemd-journald.socket system.slice -.mount basic.target network-online.target", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-01-04 11:33:36 EST", "AssertTimestampMonotonic": "556031925", "Before": "shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-01-04 11:33:36 EST", "ConditionTimestampMonotonic": "556031922", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-demo-network.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698245632", "EffectiveMemoryMax": "3698245632", "EffectiveTasksMax": "22349", "ExecMainCode": "1", "ExecMainExitTimestamp": "Sat 2025-01-04 11:33:36 EST", "ExecMainExitTimestampMonotonic": "556073094", "ExecMainHandoffTimestamp": "Sat 2025-01-04 11:33:36 EST", "ExecMainHandoffTimestampMonotonic": "556042418", "ExecMainPID": "36179", "ExecMainStartTimestamp": "Sat 2025-01-04 11:33:36 EST", "ExecMainStartTimestampMonotonic": "556032783", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", 
"FragmentPath": "/run/systemd/generator/quadlet-demo-network.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo-network.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-01-04 11:33:36 EST", "InactiveExitTimestampMonotonic": "556033249", "InvocationID": "ce85c5c8a7144a81b4be49da6cd8a9dc", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13968", "LimitNPROCSoft": "13968", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13968", "LimitSIGPENDINGSoft": "13968", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3144245248", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo-network.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", 
"ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "-.mount sysinit.target system.slice", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo.network", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-01-04 11:33:36 EST", "StateChangeTimestampMonotonic": "556073279", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "exited", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo-network", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22349", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 04 January 2025 11:36:12 -0500 (0:00:00.911) 0:03:08.348 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736008415.4327424, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "e57c08d49aff4bae8daab138d913aeddaa8682a0", "ctime": 1736008415.4387422, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 616562899, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": 
false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1736008415.0347443, "nlink": 1, "path": "/etc/containers/systemd/quadlet-demo.network", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 74, "uid": 0, "version": "4177444378", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 04 January 2025 11:36:12 -0500 (0:00:00.426) 0:03:08.774 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 04 January 2025 11:36:12 -0500 (0:00:00.083) 0:03:08.858 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 04 January 2025 11:36:12 -0500 (0:00:00.355) 0:03:09.213 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 04 January 2025 11:36:13 -0500 (0:00:00.053) 0:03:09.266 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 04 January 2025 11:36:13 -0500 (0:00:00.034) 0:03:09.300 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 04 January 2025 11:36:13 -0500 (0:00:00.035) 0:03:09.335 ****** changed: [managed-node2] => { "changed": true, "path": "/etc/containers/systemd/quadlet-demo.network", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 04 January 2025 11:36:13 -0500 (0:00:00.365) 0:03:09.701 ****** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 04 January 2025 11:36:14 -0500 (0:00:00.772) 0:03:10.473 ****** changed: 
[managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 04 January 2025 11:36:14 -0500 (0:00:00.446) 0:03:10.919 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 04 January 2025 11:36:14 -0500 (0:00:00.050) 0:03:10.970 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 04 January 2025 11:36:14 -0500 (0:00:00.047) 0:03:11.017 ****** changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.027514", "end": "2025-01-04 11:36:15.115766", "rc": 0, "start": "2025-01-04 11:36:15.088252" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 04 January 2025 11:36:15 -0500 (0:00:00.400) 0:03:11.417 ****** included: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 04 January 2025 11:36:15 -0500 (0:00:00.068) 0:03:11.486 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 04 January 2025 11:36:15 -0500 (0:00:00.032) 0:03:11.519 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 04 January 2025 11:36:15 -0500 (0:00:00.081) 0:03:11.600 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 04 January 2025 11:36:15 -0500 (0:00:00.039) 0:03:11.640 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.029748", "end": "2025-01-04 11:36:15.745428", 
"rc": 0, "start": "2025-01-04 11:36:15.715680" } STDOUT: localhost/podman-pause 5.3.1-1733097600 c95b7a4fdf05 6 minutes ago 701 kB quay.io/libpod/testimage 20210610 9f9ec7f2fdef 3 years ago 7.99 MB TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 04 January 2025 11:36:15 -0500 (0:00:00.411) 0:03:12.052 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.025631", "end": "2025-01-04 11:36:16.149908", "rc": 0, "start": "2025-01-04 11:36:16.124277" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 04 January 2025 11:36:16 -0500 (0:00:00.399) 0:03:12.451 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.034473", "end": "2025-01-04 11:36:16.558452", "rc": 0, "start": "2025-01-04 11:36:16.523979" } STDOUT: 23724cd2b98c localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes c8c001b59877-service dbc2b9607a4c localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp ecaac7b287a8-infra f27f043c4df7 quay.io/libpod/testimage:20210610 3 minutes ago Up 3 minutes 0.0.0.0:15002->80/tcp httpd2-httpd2 ea65b4c142a1 localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 8c5a350fe43d-service 196161f20f83 localhost/podman-pause:5.3.1-1733097600 3 minutes ago Up 3 minutes 0.0.0.0:15003->80/tcp a93b7ede24ca-infra cba46159e385 quay.io/libpod/testimage:20210610 3 minutes ago Up 3 minutes 0.0.0.0:15003->80/tcp httpd3-httpd3 TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 04 January 2025 11:36:16 -0500 (0:00:00.428) 0:03:12.880 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.026893", "end": "2025-01-04 11:36:16.973936", "rc": 0, "start": "2025-01-04 11:36:16.947043" } STDOUT: podman podman-default-kube-network TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 04 January 2025 11:36:17 -0500 (0:00:00.419) 0:03:13.299 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 04 January 2025 11:36:17 -0500 (0:00:00.419) 0:03:13.718 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 04 January 2025 11:36:17 -0500 (0:00:00.440) 0:03:14.158 ****** ok: [managed-node2] => { "ansible_facts": { 
"services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": 
"dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": 
"grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", 
"source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service": { "name": "podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service", "source": "systemd", "state": "running", "status": "active" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": 
"running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": 
"systemd", "state": "unknown", "status": "static" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": 
{ "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": 
"systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": 
"systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": 
"user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 04 January 2025 11:36:19 -0500 (0:00:01.890) 0:03:16.049 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Saturday 04 January 2025 11:36:19 -0500 (0:00:00.032) 0:03:16.081 ****** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Saturday 04 January 2025 11:36:19 -0500 (0:00:00.030) 0:03:16.112 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Saturday 04 January 2025 11:36:19 -0500 (0:00:00.030) 0:03:16.142 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Ensure no resources] ***************************************************** task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:188 Saturday 04 January 2025 11:36:19 -0500 (0:00:00.047) 0:03:16.189 ****** fatal: [managed-node2]: FAILED! 
=> { "assertion": "__podman_test_debug_images.stdout == \"\"", "changed": false, "evaluated_to": false } MSG: Assertion failed TASK [Debug] ******************************************************************* task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:199 Saturday 04 January 2025 11:36:19 -0500 (0:00:00.036) 0:03:16.225 ****** ok: [managed-node2] => { "changed": false, "cmd": "exec 1>&2\nset -x\nset -o pipefail\nsystemctl list-units --plain -l --all | grep quadlet || :\nsystemctl list-unit-files --all | grep quadlet || :\nsystemctl list-units --plain --failed -l --all | grep quadlet || :\n", "delta": "0:00:00.380643", "end": "2025-01-04 11:36:20.667452", "rc": 0, "start": "2025-01-04 11:36:20.286809" } STDERR: + set -o pipefail + systemctl list-units --plain -l --all + grep quadlet + : + systemctl list-unit-files --all + grep quadlet + : + grep quadlet + systemctl list-units --plain --failed -l --all + : TASK [Get journald] ************************************************************ task path: /tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:209 Saturday 04 January 2025 11:36:20 -0500 (0:00:00.742) 0:03:16.968 ****** fatal: [managed-node2]: FAILED! => { "changed": false, "cmd": [ "journalctl", "-ex" ], "delta": "0:00:00.030996", "end": "2025-01-04 11:36:21.051059", "failed_when_result": true, "rc": 0, "start": "2025-01-04 11:36:21.020063" } STDOUT: Jan 04 11:31:55 managed-node2 systemd[23226]: Listening on dbus.socket - D-Bus User Message Bus Socket. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 9. Jan 04 11:31:55 managed-node2 systemd[23226]: Reached target sockets.target - Sockets. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 8. Jan 04 11:31:55 managed-node2 systemd[23226]: Reached target basic.target - Basic System. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 2. Jan 04 11:31:55 managed-node2 systemd[23226]: Reached target default.target - Main User Target. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 1. Jan 04 11:31:55 managed-node2 systemd[23226]: Startup finished in 70ms. ░░ Subject: User manager start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The user manager instance for user 3001 has been started. All services queued ░░ for starting have been started. Note that other services might still be starting ░░ up or be started at any later time. ░░ ░░ Startup of the manager took 70774 microseconds. Jan 04 11:31:55 managed-node2 systemd[1]: Started user@3001.service - User Manager for UID 3001. ░░ Subject: A start job for unit user@3001.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@3001.service has finished successfully. 
░░ ░░ The job identifier is 1973. Jan 04 11:31:56 managed-node2 python3.12[23372]: ansible-file Invoked with path=/tmp/lsr_83t3r8ir_podman/httpd1 state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:31:56 managed-node2 python3.12[23503]: ansible-file Invoked with path=/tmp/lsr_83t3r8ir_podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:31:57 managed-node2 sudo[23676]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ukvatkplbrcszidibphkwttlsgmqtdhx ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736008317.157093-14273-171456620512807/AnsiballZ_podman_image.py' Jan 04 11:31:57 managed-node2 sudo[23676]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-23676) opened. Jan 04 11:31:57 managed-node2 sudo[23676]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jan 04 11:31:57 managed-node2 systemd[23226]: Created slice session.slice - User Core Session Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 18. Jan 04 11:31:57 managed-node2 systemd[23226]: Starting dbus-broker.service - D-Bus User Message Bus... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 13. Jan 04 11:31:57 managed-node2 dbus-broker-launch[23700]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Jan 04 11:31:57 managed-node2 dbus-broker-launch[23700]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Jan 04 11:31:57 managed-node2 systemd[23226]: Started dbus-broker.service - D-Bus User Message Bus. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. Jan 04 11:31:57 managed-node2 dbus-broker-launch[23700]: Ready Jan 04 11:31:57 managed-node2 systemd[23226]: Created slice user.slice - Slice /user. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 20. Jan 04 11:31:57 managed-node2 systemd[23226]: Started podman-23686.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 19. 
Jan 04 11:31:57 managed-node2 systemd[23226]: Started podman-pause-020c7518.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 23. Jan 04 11:31:57 managed-node2 systemd[23226]: Started podman-23702.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 27. Jan 04 11:31:58 managed-node2 systemd[23226]: Started podman-23727.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 31. Jan 04 11:31:58 managed-node2 sudo[23676]: pam_unix(sudo:session): session closed for user podman_basic_user Jan 04 11:31:59 managed-node2 python3.12[23864]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:31:59 managed-node2 python3.12[23995]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d state=directory owner=podman_basic_user group=3001 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:31:59 managed-node2 python3.12[24126]: ansible-ansible.legacy.stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 04 11:32:00 managed-node2 python3.12[24231]: ansible-ansible.legacy.copy Invoked with dest=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml owner=podman_basic_user group=3001 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736008319.6432922-14384-218752844033904/.source.yml _original_basename=.pixe7uee follow=False checksum=1a8262735b4b21563f85a46c73b644d4dc2b00fd backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:32:00 managed-node2 sudo[24404]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-rxnpeowjkhqgrhucbnhrgolbojsqpxcl ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736008320.3666356-14423-205375948089438/AnsiballZ_podman_play.py' Jan 04 11:32:00 managed-node2 sudo[24404]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-24404) opened. 
Jan 04 11:32:00 managed-node2 sudo[24404]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jan 04 11:32:00 managed-node2 python3.12[24407]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 04 11:32:00 managed-node2 systemd[23226]: Started podman-24414.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 35. Jan 04 11:32:01 managed-node2 systemd[23226]: Created slice user-libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e.slice - cgroup user-libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e.slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 39. Jan 04 11:32:01 managed-node2 kernel: tun: Universal TUN/TAP device driver, 1.6 Jan 04 11:32:01 managed-node2 systemd[23226]: Started rootless-netns-1c449f92.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 43. Jan 04 11:32:01 managed-node2 kernel: bridge: filtering via arp/ip/ip6tables is no longer available by default. Update your scripts to load br_netfilter if you need this. Jan 04 11:32:01 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 04 11:32:01 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 04 11:32:01 managed-node2 kernel: veth0: entered allmulticast mode Jan 04 11:32:01 managed-node2 kernel: veth0: entered promiscuous mode Jan 04 11:32:01 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 04 11:32:01 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Jan 04 11:32:01 managed-node2 systemd[23226]: Started run-r24cb90d5c5ab4923b0b6b9050b5850e9.scope - /usr/libexec/podman/aardvark-dns --config /run/user/3001/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 47. 
Jan 04 11:32:01 managed-node2 aardvark-dns[24497]: starting aardvark on a child with pid 24498 Jan 04 11:32:01 managed-node2 aardvark-dns[24498]: Successfully parsed config Jan 04 11:32:01 managed-node2 aardvark-dns[24498]: Listen v4 ip {"podman-default-kube-network": [10.89.0.1]} Jan 04 11:32:01 managed-node2 aardvark-dns[24498]: Listen v6 ip {} Jan 04 11:32:01 managed-node2 aardvark-dns[24498]: Using the following upstream servers: [169.254.1.1:53, 10.29.169.13:53, 10.29.170.12:53] Jan 04 11:32:01 managed-node2 conmon[24515]: conmon 219b809a67b76f01c3e3 : failed to write to /proc/self/oom_score_adj: Permission denied Jan 04 11:32:01 managed-node2 systemd[23226]: Started libpod-conmon-219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 51. Jan 04 11:32:01 managed-node2 conmon[24516]: conmon 219b809a67b76f01c3e3 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/14/attach} Jan 04 11:32:01 managed-node2 conmon[24516]: conmon 219b809a67b76f01c3e3 : terminal_ctrl_fd: 14 Jan 04 11:32:01 managed-node2 conmon[24516]: conmon 219b809a67b76f01c3e3 : winsz read side: 17, winsz write side: 18 Jan 04 11:32:01 managed-node2 systemd[23226]: Started libpod-219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 56. Jan 04 11:32:01 managed-node2 conmon[24516]: conmon 219b809a67b76f01c3e3 : container PID: 24518 Jan 04 11:32:01 managed-node2 conmon[24520]: conmon a6beb024b5d86fca9df6 : failed to write to /proc/self/oom_score_adj: Permission denied Jan 04 11:32:01 managed-node2 systemd[23226]: Started libpod-conmon-a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 61. Jan 04 11:32:01 managed-node2 conmon[24521]: conmon a6beb024b5d86fca9df6 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/13/attach} Jan 04 11:32:01 managed-node2 conmon[24521]: conmon a6beb024b5d86fca9df6 : terminal_ctrl_fd: 13 Jan 04 11:32:01 managed-node2 conmon[24521]: conmon a6beb024b5d86fca9df6 : winsz read side: 16, winsz write side: 17 Jan 04 11:32:01 managed-node2 systemd[23226]: Started libpod-a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 66. 
Jan 04 11:32:01 managed-node2 conmon[24521]: conmon a6beb024b5d86fca9df6 : container PID: 24523 Jan 04 11:32:01 managed-node2 python3.12[24407]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml Jan 04 11:32:01 managed-node2 python3.12[24407]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod: 4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e Container: a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079 Jan 04 11:32:01 managed-node2 python3.12[24407]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2025-01-04T11:32:00-05:00" level=info msg="/bin/podman filtering at log level debug" time="2025-01-04T11:32:00-05:00" level=debug msg="Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2025-01-04T11:32:00-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2025-01-04T11:32:00-05:00" level=info msg="Using sqlite as database backend" time="2025-01-04T11:32:00-05:00" level=debug msg="systemd-logind: Unknown object '/'." time="2025-01-04T11:32:00-05:00" level=debug msg="Using graph driver overlay" time="2025-01-04T11:32:00-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" time="2025-01-04T11:32:00-05:00" level=debug msg="Using run root /run/user/3001/containers" time="2025-01-04T11:32:00-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" time="2025-01-04T11:32:00-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" time="2025-01-04T11:32:00-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" time="2025-01-04T11:32:00-05:00" level=debug msg="Using transient store: false" time="2025-01-04T11:32:00-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2025-01-04T11:32:00-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-01-04T11:32:00-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-01-04T11:32:00-05:00" level=debug msg="Cached value indicated that metacopy is not being used" time="2025-01-04T11:32:00-05:00" level=debug msg="Cached value indicated that native-diff is usable" time="2025-01-04T11:32:00-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" time="2025-01-04T11:32:00-05:00" level=debug msg="Initializing event backend file" time="2025-01-04T11:32:00-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2025-01-04T11:32:00-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" time="2025-01-04T11:32:00-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2025-01-04T11:32:00-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" time="2025-01-04T11:32:00-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime 
runj: invalid argument" time="2025-01-04T11:32:00-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2025-01-04T11:32:00-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2025-01-04T11:32:00-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2025-01-04T11:32:00-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2025-01-04T11:32:00-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" time="2025-01-04T11:32:00-05:00" level=info msg="Setting parallel job count to 7" time="2025-01-04T11:32:00-05:00" level=debug msg="Successfully loaded 1 networks" time="2025-01-04T11:32:00-05:00" level=debug msg="found free device name podman1" time="2025-01-04T11:32:00-05:00" level=debug msg="found free ipv4 network subnet 10.89.0.0/24" time="2025-01-04T11:32:00-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-04T11:32:00-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-04T11:32:00-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." time="2025-01-04T11:32:00-05:00" level=debug msg="reference \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\" does not resolve to an image ID" time="2025-01-04T11:32:00-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." time="2025-01-04T11:32:00-05:00" level=debug msg="reference \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\" does not resolve to an image ID" time="2025-01-04T11:32:00-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." 
time="2025-01-04T11:32:00-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-04T11:32:00-05:00" level=debug msg="FROM \"scratch\"" time="2025-01-04T11:32:00-05:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are not supported" time="2025-01-04T11:32:00-05:00" level=debug msg="Check for idmapped mounts support " time="2025-01-04T11:32:00-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-04T11:32:00-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-04T11:32:00-05:00" level=debug msg="overlay: test mount indicated that volatile is being used" time="2025-01-04T11:32:00-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/aa04371c3e894aed357c6cfc2570aff6fb7b7d23c9c6cc393c095fa95a1d69f0/empty,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/aa04371c3e894aed357c6cfc2570aff6fb7b7d23c9c6cc393c095fa95a1d69f0/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/aa04371c3e894aed357c6cfc2570aff6fb7b7d23c9c6cc393c095fa95a1d69f0/work,userxattr,volatile,context=\"system_u:object_r:container_file_t:s0:c738,c970\"" time="2025-01-04T11:32:00-05:00" level=debug msg="Container ID: 13148fc10a35a48704ad04e509034933e5e7ceb07c1ddfb356ef2b60b21e5abf" time="2025-01-04T11:32:00-05:00" level=debug msg="Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:copy Args:[/usr/libexec/podman/catatonit /catatonit] Flags:[] Attrs:map[] Message:COPY /usr/libexec/podman/catatonit /catatonit Heredocs:[] Original:COPY /usr/libexec/podman/catatonit /catatonit}" time="2025-01-04T11:32:00-05:00" level=debug msg="COPY []string(nil), imagebuilder.Copy{FromFS:false, From:\"\", Src:[]string{\"/usr/libexec/podman/catatonit\"}, Dest:\"/catatonit\", Download:false, Chown:\"\", Chmod:\"\", Checksum:\"\", Files:[]imagebuilder.File(nil), KeepGitDir:false, Link:false, Parents:false, Excludes:[]string(nil)}" time="2025-01-04T11:32:00-05:00" level=debug msg="EnsureContainerPath \"/\" (owner \"\", mode 0) in \"13148fc10a35a48704ad04e509034933e5e7ceb07c1ddfb356ef2b60b21e5abf\"" time="2025-01-04T11:32:00-05:00" level=debug msg="added content file:872045bc026632654869cebc14f91fe531969760bf26925e889d9b70527c8b67" time="2025-01-04T11:32:00-05:00" level=debug msg="Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:entrypoint Args:[/catatonit -P] Flags:[] Attrs:map[json:true] Message:ENTRYPOINT /catatonit -P Heredocs:[] Original:ENTRYPOINT [\"/catatonit\", \"-P\"]}" time="2025-01-04T11:32:00-05:00" level=debug msg="EnsureContainerPath \"/\" (owner \"\", mode 0) in \"13148fc10a35a48704ad04e509034933e5e7ceb07c1ddfb356ef2b60b21e5abf\"" time="2025-01-04T11:32:00-05:00" level=debug msg="COMMIT localhost/podman-pause:5.3.1-1733097600" time="2025-01-04T11:32:00-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\"" time="2025-01-04T11:32:00-05:00" level=debug msg="COMMIT \"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\"" time="2025-01-04T11:32:00-05:00" level=debug msg="committing image with reference 
\"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\" is allowed by policy" time="2025-01-04T11:32:00-05:00" level=debug msg="layer list: [\"aa04371c3e894aed357c6cfc2570aff6fb7b7d23c9c6cc393c095fa95a1d69f0\"]" time="2025-01-04T11:32:00-05:00" level=debug msg="using \"/var/tmp/buildah1591127554\" to hold temporary data" time="2025-01-04T11:32:00-05:00" level=debug msg="Tar with options on /home/podman_basic_user/.local/share/containers/storage/overlay/aa04371c3e894aed357c6cfc2570aff6fb7b7d23c9c6cc393c095fa95a1d69f0/diff" time="2025-01-04T11:32:00-05:00" level=debug msg="layer \"aa04371c3e894aed357c6cfc2570aff6fb7b7d23c9c6cc393c095fa95a1d69f0\" size is 699392 bytes, uncompressed digest sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6, possibly-compressed digest sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6" time="2025-01-04T11:32:00-05:00" level=debug msg="OCIv1 config = {\"created\":\"2025-01-04T16:32:00.970143613Z\",\"architecture\":\"amd64\",\"os\":\"linux\",\"config\":{\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Entrypoint\":[\"/catatonit\",\"-P\"],\"WorkingDir\":\"/\",\"Labels\":{\"io.buildah.version\":\"1.38.0\"}},\"rootfs\":{\"type\":\"layers\",\"diff_ids\":[\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"]},\"history\":[{\"created\":\"2025-01-04T16:32:00.943891967Z\",\"created_by\":\"/bin/sh -c #(nop) COPY file:872045bc026632654869cebc14f91fe531969760bf26925e889d9b70527c8b67 in /catatonit \",\"empty_layer\":true},{\"created\":\"2025-01-04T16:32:00.973741253Z\",\"created_by\":\"/bin/sh -c #(nop) ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]\"}]}" time="2025-01-04T11:32:00-05:00" level=debug msg="OCIv1 manifest = {\"schemaVersion\":2,\"mediaType\":\"application/vnd.oci.image.manifest.v1+json\",\"config\":{\"mediaType\":\"application/vnd.oci.image.config.v1+json\",\"digest\":\"sha256:0cec0ba6b70ee34915ec9825ae81de8f9e0bc64dc0c543c9eb8094c2d1a78cd0\",\"size\":685},\"layers\":[{\"mediaType\":\"application/vnd.oci.image.layer.v1.tar\",\"digest\":\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\",\"size\":699392}],\"annotations\":{\"org.opencontainers.image.base.digest\":\"\",\"org.opencontainers.image.base.name\":\"\"}}" time="2025-01-04T11:32:00-05:00" level=debug msg="Docker v2s2 config = 
{\"created\":\"2025-01-04T16:32:00.970143613Z\",\"container\":\"13148fc10a35a48704ad04e509034933e5e7ceb07c1ddfb356ef2b60b21e5abf\",\"container_config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":null,\"Image\":\"\",\"Volumes\":{},\"WorkingDir\":\"/\",\"Entrypoint\":[\"/catatonit\",\"-P\"],\"OnBuild\":[],\"Labels\":{\"io.buildah.version\":\"1.38.0\"}},\"config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":null,\"Image\":\"\",\"Volumes\":{},\"WorkingDir\":\"/\",\"Entrypoint\":[\"/catatonit\",\"-P\"],\"OnBuild\":[],\"Labels\":{\"io.buildah.version\":\"1.38.0\"}},\"architecture\":\"amd64\",\"os\":\"linux\",\"rootfs\":{\"type\":\"layers\",\"diff_ids\":[\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"]},\"history\":[{\"created\":\"2025-01-04T16:32:00.943891967Z\",\"created_by\":\"/bin/sh -c #(nop) COPY file:872045bc026632654869cebc14f91fe531969760bf26925e889d9b70527c8b67 in /catatonit \",\"empty_layer\":true},{\"created\":\"2025-01-04T16:32:00.973741253Z\",\"created_by\":\"/bin/sh -c #(nop) ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]\"}]}" time="2025-01-04T11:32:00-05:00" level=debug msg="Docker v2s2 manifest = {\"schemaVersion\":2,\"mediaType\":\"application/vnd.docker.distribution.manifest.v2+json\",\"config\":{\"mediaType\":\"application/vnd.docker.container.image.v1+json\",\"size\":1348,\"digest\":\"sha256:c243c799d4fe34f33faeabe031c005a690cfa802239305f6e8f44718b82b5be0\"},\"layers\":[{\"mediaType\":\"application/vnd.docker.image.rootfs.diff.tar\",\"size\":699392,\"digest\":\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"}]}" time="2025-01-04T11:32:00-05:00" level=debug msg="Using SQLite blob info cache at /home/podman_basic_user/.local/share/containers/cache/blob-info-cache-v1.sqlite" time="2025-01-04T11:32:00-05:00" level=debug msg="IsRunningImageAllowed for image containers-storage:" time="2025-01-04T11:32:00-05:00" level=debug msg=" Using transport \"containers-storage\" policy section \"\"" time="2025-01-04T11:32:00-05:00" level=debug msg=" Requirement 0: allowed" time="2025-01-04T11:32:00-05:00" level=debug msg="Overall: allowed" time="2025-01-04T11:32:00-05:00" level=debug msg="start reading config" time="2025-01-04T11:32:00-05:00" level=debug msg="finished reading config" time="2025-01-04T11:32:00-05:00" level=debug msg="Manifest has MIME type application/vnd.oci.image.manifest.v1+json, ordered candidate list [application/vnd.oci.image.manifest.v1+json, application/vnd.docker.distribution.manifest.v2+json, application/vnd.docker.distribution.manifest.v1+prettyjws, application/vnd.docker.distribution.manifest.v1+json]" time="2025-01-04T11:32:00-05:00" level=debug msg="... 
will first try using the original manifest unmodified" time="2025-01-04T11:32:00-05:00" level=debug msg="Checking if we can reuse blob sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6: general substitution = true, compression for MIME type \"application/vnd.oci.image.layer.v1.tar\" = true" time="2025-01-04T11:32:00-05:00" level=debug msg="reading layer \"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"" time="2025-01-04T11:32:00-05:00" level=debug msg="No compression detected" time="2025-01-04T11:32:00-05:00" level=debug msg="Using original blob without modification" time="2025-01-04T11:32:00-05:00" level=debug msg="Applying tar in /home/podman_basic_user/.local/share/containers/storage/overlay/83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6/diff" time="2025-01-04T11:32:01-05:00" level=debug msg="finished reading layer \"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"" time="2025-01-04T11:32:01-05:00" level=debug msg="No compression detected" time="2025-01-04T11:32:01-05:00" level=debug msg="Compression change for blob sha256:0cec0ba6b70ee34915ec9825ae81de8f9e0bc64dc0c543c9eb8094c2d1a78cd0 (\"application/vnd.oci.image.config.v1+json\") not supported" time="2025-01-04T11:32:01-05:00" level=debug msg="Using original blob without modification" time="2025-01-04T11:32:01-05:00" level=debug msg="setting image creation date to 2025-01-04 16:32:00.970143613 +0000 UTC" time="2025-01-04T11:32:01-05:00" level=debug msg="created new image ID \"0cec0ba6b70ee34915ec9825ae81de8f9e0bc64dc0c543c9eb8094c2d1a78cd0\" with metadata \"{}\"" time="2025-01-04T11:32:01-05:00" level=debug msg="added name \"localhost/podman-pause:5.3.1-1733097600\" to image \"0cec0ba6b70ee34915ec9825ae81de8f9e0bc64dc0c543c9eb8094c2d1a78cd0\"" time="2025-01-04T11:32:01-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\"" time="2025-01-04T11:32:01-05:00" level=debug msg="printing final image id \"0cec0ba6b70ee34915ec9825ae81de8f9e0bc64dc0c543c9eb8094c2d1a78cd0\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Pod using bridge network mode" time="2025-01-04T11:32:01-05:00" level=debug msg="Created cgroup path user.slice/user-libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e.slice for parent user.slice and name libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e" time="2025-01-04T11:32:01-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e.slice" time="2025-01-04T11:32:01-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e.slice" time="2025-01-04T11:32:01-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-04T11:32:01-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-04T11:32:01-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." 
time="2025-01-04T11:32:01-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@0cec0ba6b70ee34915ec9825ae81de8f9e0bc64dc0c543c9eb8094c2d1a78cd0\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-04T11:32:01-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@0cec0ba6b70ee34915ec9825ae81de8f9e0bc64dc0c543c9eb8094c2d1a78cd0)" time="2025-01-04T11:32:01-05:00" level=debug msg="exporting opaque data as blob \"sha256:0cec0ba6b70ee34915ec9825ae81de8f9e0bc64dc0c543c9eb8094c2d1a78cd0\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Inspecting image 0cec0ba6b70ee34915ec9825ae81de8f9e0bc64dc0c543c9eb8094c2d1a78cd0" time="2025-01-04T11:32:01-05:00" level=debug msg="exporting opaque data as blob \"sha256:0cec0ba6b70ee34915ec9825ae81de8f9e0bc64dc0c543c9eb8094c2d1a78cd0\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Inspecting image 0cec0ba6b70ee34915ec9825ae81de8f9e0bc64dc0c543c9eb8094c2d1a78cd0" time="2025-01-04T11:32:01-05:00" level=debug msg="Inspecting image 0cec0ba6b70ee34915ec9825ae81de8f9e0bc64dc0c543c9eb8094c2d1a78cd0" time="2025-01-04T11:32:01-05:00" level=debug msg="using systemd mode: false" time="2025-01-04T11:32:01-05:00" level=debug msg="setting container name 4066b38bb5e8-infra" time="2025-01-04T11:32:01-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network aac903796a463b10472049d22d2ff923ce5881d61c354442af57df0f9acd781d bridge podman1 2025-01-04 11:32:00.808293245 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2025-01-04T11:32:01-05:00" level=debug msg="Successfully loaded 2 networks" time="2025-01-04T11:32:01-05:00" level=debug msg="Allocated lock 1 for container 219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14" time="2025-01-04T11:32:01-05:00" level=debug msg="exporting opaque data as blob \"sha256:0cec0ba6b70ee34915ec9825ae81de8f9e0bc64dc0c543c9eb8094c2d1a78cd0\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Created container \"219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Container \"219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14/userdata\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Container \"219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14\" has run directory \"/run/user/3001/containers/overlay-containers/219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14/userdata\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-04T11:32:01-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-04T11:32:01-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-01-04T11:32:01-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-04T11:32:01-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-04T11:32:01-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2025-01-04T11:32:01-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-04T11:32:01-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-04T11:32:01-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-01-04T11:32:01-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-04T11:32:01-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-04T11:32:01-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-04T11:32:01-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-04T11:32:01-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-01-04T11:32:01-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-04T11:32:01-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-04T11:32:01-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-04T11:32:01-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:01-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-04T11:32:01-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-04T11:32:01-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-01-04T11:32:01-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-04T11:32:01-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-04T11:32:01-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-04T11:32:01-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:01-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-04T11:32:01-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-04T11:32:01-05:00" level=debug msg="using systemd mode: false" time="2025-01-04T11:32:01-05:00" level=debug msg="adding container to pod httpd1" time="2025-01-04T11:32:01-05:00" level=debug msg="setting container name httpd1-httpd1" 
time="2025-01-04T11:32:01-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-01-04T11:32:01-05:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2025-01-04T11:32:01-05:00" level=debug msg="Adding mount /proc" time="2025-01-04T11:32:01-05:00" level=debug msg="Adding mount /dev" time="2025-01-04T11:32:01-05:00" level=debug msg="Adding mount /dev/pts" time="2025-01-04T11:32:01-05:00" level=debug msg="Adding mount /dev/mqueue" time="2025-01-04T11:32:01-05:00" level=debug msg="Adding mount /sys" time="2025-01-04T11:32:01-05:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2025-01-04T11:32:01-05:00" level=debug msg="Allocated lock 2 for container a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079" time="2025-01-04T11:32:01-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Created container \"a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Container \"a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079/userdata\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Container \"a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079\" has run directory \"/run/user/3001/containers/overlay-containers/a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079/userdata\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Strongconnecting node 219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14" time="2025-01-04T11:32:01-05:00" level=debug msg="Pushed 219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14 onto stack" time="2025-01-04T11:32:01-05:00" level=debug msg="Finishing node 219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14. Popped 219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14 off stack" time="2025-01-04T11:32:01-05:00" level=debug msg="Strongconnecting node a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079" time="2025-01-04T11:32:01-05:00" level=debug msg="Pushed a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079 onto stack" time="2025-01-04T11:32:01-05:00" level=debug msg="Finishing node a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079. 
Popped a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079 off stack" time="2025-01-04T11:32:01-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/R7JMOHH4XTSV55MZKCTRNIJDFC,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/e5c22c8b395272c734ae1c2b04ba2a8f0f7d50cc693da01faa29cf467d4dc28f/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/e5c22c8b395272c734ae1c2b04ba2a8f0f7d50cc693da01faa29cf467d4dc28f/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c57,c559\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Made network namespace at /run/user/3001/netns/netns-b90250f7-7c41-4bfe-993a-24596fd823d6 for container 219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14" time="2025-01-04T11:32:01-05:00" level=debug msg="Mounted container \"219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/e5c22c8b395272c734ae1c2b04ba2a8f0f7d50cc693da01faa29cf467d4dc28f/merged\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Creating rootless network namespace at \"/run/user/3001/containers/networks/rootless-netns/rootless-netns\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Created root filesystem for container 219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14 at /home/podman_basic_user/.local/share/containers/storage/overlay/e5c22c8b395272c734ae1c2b04ba2a8f0f7d50cc693da01faa29cf467d4dc28f/merged" time="2025-01-04T11:32:01-05:00" level=debug msg="pasta arguments: --config-net --pid /run/user/3001/containers/networks/rootless-netns/rootless-netns-conn.pid --dns-forward 169.254.1.1 -t none -u none -T none -U none --no-map-gw --quiet --netns /run/user/3001/containers/networks/rootless-netns/rootless-netns --map-guest-addr 169.254.1.2" time="2025-01-04T11:32:01-05:00" level=debug msg="The path of /etc/resolv.conf in the mount ns is \"/etc/resolv.conf\"" [DEBUG netavark::network::validation] Validating network namespace... [DEBUG netavark::commands::setup] Setting up... 
[INFO netavark::firewall] Using nftables firewall driver [DEBUG netavark::network::bridge] Setup network podman-default-kube-network [DEBUG netavark::network::bridge] Container interface name: eth0 with IP addresses [10.89.0.2/24] [DEBUG netavark::network::bridge] Bridge name: podman1 with IP addresses [10.89.0.1/24] [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.ip_forward to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/podman1/rp_filter to 2 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv6/conf/eth0/autoconf to 0 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/arp_notify to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/rp_filter to 2 [INFO netavark::network::netlink] Adding route (dest: 0.0.0.0/0 ,gw: 10.89.0.1, metric 100) [INFO netavark::firewall::nft] Creating container chain nv_aac90379_10_89_0_0_nm24 [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.conf.podman1.route_localnet to 1 [DEBUG netavark::dns::aardvark] Spawning aardvark server [DEBUG netavark::dns::aardvark] start aardvark-dns: ["systemd-run", "-q", "--scope", "--user", "/usr/libexec/podman/aardvark-dns", "--config", "/run/user/3001/containers/networks/aardvark-dns", "-p", "53", "run"] [DEBUG netavark::commands::setup] { "podman-default-kube-network": StatusBlock { dns_search_domains: Some( [ "dns.podman", ], ), dns_server_ips: Some( [ 10.89.0.1, ], ), interfaces: Some( { "eth0": NetInterface { mac_address: "c2:a4:da:a9:fd:62", subnets: Some( [ NetAddress { gateway: Some( 10.89.0.1, ), ipnet: 10.89.0.2/24, }, ], ), }, }, ), }, } [DEBUG netavark::commands::setup] Setup complete time="2025-01-04T11:32:01-05:00" level=debug msg="rootlessport: time=\"2025-01-04T11:32:01-05:00\" level=info msg=\"Starting parent driver\"\n" time="2025-01-04T11:32:01-05:00" level=debug msg="rootlessport: time=\"2025-01-04T11:32:01-05:00\" level=info msg=\"opaque=map[builtin.readypipepath:/run/user/3001/libpod/tmp/rootlessport56212955/.bp-ready.pipe builtin.socketpath:/run/user/3001/libpod/tmp/rootlessport56212955/.bp.sock]\"\n" time="2025-01-04T11:32:01-05:00" level=debug msg="rootlessport: time=\"2025-01-04T11:32:01-05:00\" level=info msg=\"Starting child driver in child netns (\\\"/proc/self/exe\\\" [rootlessport-child])\"\n" time="2025-01-04T11:32:01-05:00" level=debug msg="rootlessport: time=\"2025-01-04T11:32:01-05:00\" level=info msg=\"Waiting for initComplete\"\n" time="2025-01-04T11:32:01-05:00" level=debug msg="rootlessport: time=\"2025-01-04T11:32:01-05:00\" level=info msg=\"initComplete is closed; parent and child established the communication channel\"\n" time="2025-01-04T11:32:01-05:00" level=debug msg="rootlessport: time=\"2025-01-04T11:32:01-05:00\" level=info msg=\"Exposing ports [{ 80 15001 1 tcp}]\"\n" time="2025-01-04T11:32:01-05:00" level=debug msg="rootlessport: time=\"2025-01-04T11:32:01-05:00\" level=info msg=Ready\n" time="2025-01-04T11:32:01-05:00" level=debug msg="rootlessport is ready" time="2025-01-04T11:32:01-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2025-01-04T11:32:01-05:00" level=debug msg="Setting Cgroups for container 219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14 to 
user-libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e.slice:libpod:219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14" time="2025-01-04T11:32:01-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-01-04T11:32:01-05:00" level=debug msg="Workdir \"/\" resolved to host path \"/home/podman_basic_user/.local/share/containers/storage/overlay/e5c22c8b395272c734ae1c2b04ba2a8f0f7d50cc693da01faa29cf467d4dc28f/merged\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Created OCI spec for container 219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14 at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14/userdata/config.json" time="2025-01-04T11:32:01-05:00" level=debug msg="Created cgroup path user.slice/user-libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e.slice for parent user.slice and name libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e" time="2025-01-04T11:32:01-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e.slice" time="2025-01-04T11:32:01-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e.slice" time="2025-01-04T11:32:01-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-01-04T11:32:01-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c 219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14 -u 219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14 -r /usr/bin/crun -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14/userdata -p /run/user/3001/containers/overlay-containers/219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14/userdata/pidfile -n 4066b38bb5e8-infra --exit-dir /run/user/3001/libpod/tmp/exits --persist-dir /run/user/3001/libpod/tmp/persist/219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14 --full-attach -s -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file 
--exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg 219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14]" time="2025-01-04T11:32:01-05:00" level=info msg="Running conmon under slice user-libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e.slice and unitName libpod-conmon-219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14.scope" [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied time="2025-01-04T11:32:01-05:00" level=debug msg="Received: 24518" time="2025-01-04T11:32:01-05:00" level=info msg="Got Conmon PID as 24516" time="2025-01-04T11:32:01-05:00" level=debug msg="Created container 219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14 in OCI runtime" time="2025-01-04T11:32:01-05:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2025-01-04T11:32:01-05:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2025-01-04T11:32:01-05:00" level=debug msg="Starting container 219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14 with command [/catatonit -P]" time="2025-01-04T11:32:01-05:00" level=debug msg="Started container 219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14" time="2025-01-04T11:32:01-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/7GE4TEETLFYRITOUFT43LDAT5V,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/bb5bb6e04d077cff776259eea1e86734ad761d83e0bbb003ce41a2d225d9c3a4/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/bb5bb6e04d077cff776259eea1e86734ad761d83e0bbb003ce41a2d225d9c3a4/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c57,c559\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Mounted container \"a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/bb5bb6e04d077cff776259eea1e86734ad761d83e0bbb003ce41a2d225d9c3a4/merged\"" time="2025-01-04T11:32:01-05:00" level=debug msg="Created root filesystem for container a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079 at /home/podman_basic_user/.local/share/containers/storage/overlay/bb5bb6e04d077cff776259eea1e86734ad761d83e0bbb003ce41a2d225d9c3a4/merged" time="2025-01-04T11:32:01-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2025-01-04T11:32:01-05:00" level=debug msg="Setting Cgroups for container a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079 to user-libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e.slice:libpod:a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079" time="2025-01-04T11:32:01-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-01-04T11:32:01-05:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount" time="2025-01-04T11:32:01-05:00" level=debug msg="Created OCI spec for container a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079 at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079/userdata/config.json" time="2025-01-04T11:32:01-05:00" level=debug msg="Created cgroup path 
user.slice/user-libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e.slice for parent user.slice and name libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e" time="2025-01-04T11:32:01-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e.slice" time="2025-01-04T11:32:01-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e.slice" time="2025-01-04T11:32:01-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-01-04T11:32:01-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079 -u a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079 -r /usr/bin/crun -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079/userdata -p /run/user/3001/containers/overlay-containers/a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079/userdata/pidfile -n httpd1-httpd1 --exit-dir /run/user/3001/libpod/tmp/exits --persist-dir /run/user/3001/libpod/tmp/persist/a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079 --full-attach -s -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079]" time="2025-01-04T11:32:01-05:00" level=info msg="Running conmon under slice user-libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e.slice and unitName libpod-conmon-a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079.scope" [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied time="2025-01-04T11:32:01-05:00" level=debug msg="Received: 24523" time="2025-01-04T11:32:01-05:00" level=info msg="Got Conmon PID as 24521" time="2025-01-04T11:32:01-05:00" level=debug msg="Created container a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079 in OCI runtime" time="2025-01-04T11:32:01-05:00" level=debug 
msg="Starting container a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079 with command [/bin/busybox-extras httpd -f -p 80]" time="2025-01-04T11:32:01-05:00" level=debug msg="Started container a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079" time="2025-01-04T11:32:01-05:00" level=debug msg="Called kube.PersistentPostRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2025-01-04T11:32:01-05:00" level=debug msg="Shutting down engines" time="2025-01-04T11:32:01-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=24414 Jan 04 11:32:01 managed-node2 python3.12[24407]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Jan 04 11:32:01 managed-node2 sudo[24404]: pam_unix(sudo:session): session closed for user podman_basic_user Jan 04 11:32:01 managed-node2 sudo[24697]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zjxdatdlmcvreiirvpracgetnfazsrtv ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736008321.735781-14482-279456464405913/AnsiballZ_systemd.py' Jan 04 11:32:01 managed-node2 sudo[24697]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-24697) opened. Jan 04 11:32:02 managed-node2 sudo[24697]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jan 04 11:32:02 managed-node2 python3.12[24700]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 04 11:32:02 managed-node2 systemd[23226]: Reload requested from client PID 24701 ('systemctl')... Jan 04 11:32:02 managed-node2 systemd[23226]: Reloading... Jan 04 11:32:02 managed-node2 systemd[23226]: Reloading finished in 45 ms. Jan 04 11:32:02 managed-node2 sudo[24697]: pam_unix(sudo:session): session closed for user podman_basic_user Jan 04 11:32:02 managed-node2 sudo[24884]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cbzuxxgmxfkujbixziuexqothcnmvbki ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736008322.485175-14525-121423282136574/AnsiballZ_systemd.py' Jan 04 11:32:02 managed-node2 sudo[24884]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-24884) opened. Jan 04 11:32:02 managed-node2 sudo[24884]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jan 04 11:32:02 managed-node2 python3.12[24887]: ansible-systemd Invoked with name=podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service scope=user enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Jan 04 11:32:02 managed-node2 systemd[23226]: Reload requested from client PID 24890 ('systemctl')... Jan 04 11:32:02 managed-node2 systemd[23226]: Reloading... Jan 04 11:32:03 managed-node2 systemd[23226]: Reloading finished in 43 ms. 
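The journal entries above show how the rootless httpd1 workload is wired into systemd: ansible-systemd is called with scope=user first to daemon-reload the user manager, then to enable, and finally to start podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service, each time with XDG_RUNTIME_DIR=/run/user/3001 exported so the user bus is reachable. As a minimal sketch only (task names, become settings, and folding the daemon-reload into the enable task are assumptions; the unit name and module parameters are taken from the logged invocations), equivalent hand-written tasks could look like:

  # Enable the user-scoped podman-kube@ template unit for httpd1.yml.
  # The role logs daemon_reload as a separate module call; it is folded in here.
  - name: Enable rootless podman-kube unit for httpd1.yml
    ansible.builtin.systemd:
      name: 'podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service'
      scope: user
      enabled: true
      daemon_reload: true
    become: true
    become_user: podman_basic_user
    environment:
      XDG_RUNTIME_DIR: /run/user/3001

  # Start the unit; the user systemd instance then runs podman kube play for the httpd1 pod.
  - name: Start rootless podman-kube unit for httpd1.yml
    ansible.builtin.systemd:
      name: 'podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service'
      scope: user
      state: started
    become: true
    become_user: podman_basic_user
    environment:
      XDG_RUNTIME_DIR: /run/user/3001

The escaped unit name (the \x2d sequences) is what systemd-escape --template podman-kube@.service produces from the kube file path, as the later systemd-escape call for httpd2.yml in this log shows.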
Jan 04 11:32:03 managed-node2 sudo[24884]: pam_unix(sudo:session): session closed for user podman_basic_user Jan 04 11:32:03 managed-node2 sudo[25073]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cokjvrtefywltxriyjuvpxsvgvsdtezw ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736008323.2257214-14564-59845127645483/AnsiballZ_systemd.py' Jan 04 11:32:03 managed-node2 sudo[25073]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-25073) opened. Jan 04 11:32:03 managed-node2 sudo[25073]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jan 04 11:32:03 managed-node2 python3.12[25076]: ansible-systemd Invoked with name=podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 04 11:32:03 managed-node2 systemd[23226]: Created slice app-podman\x2dkube.slice - Slice /app/podman-kube. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 83. Jan 04 11:32:03 managed-node2 systemd[23226]: Starting podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 71. Jan 04 11:32:03 managed-node2 aardvark-dns[24498]: Received SIGHUP Jan 04 11:32:03 managed-node2 aardvark-dns[24498]: Successfully parsed config Jan 04 11:32:03 managed-node2 aardvark-dns[24498]: Listen v4 ip {} Jan 04 11:32:03 managed-node2 aardvark-dns[24498]: Listen v6 ip {} Jan 04 11:32:03 managed-node2 aardvark-dns[24498]: No configuration found stopping the sever Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only 219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14)" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=info msg="Using sqlite as database backend" Jan 04 11:32:03 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 04 11:32:03 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Jan 04 11:32:03 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Jan 04 11:32:03 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 04 
11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="systemd-logind: Unknown object '/'." Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Using graph driver overlay" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Using run root /run/user/3001/containers" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Using transient store: false" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Cached value indicated that metacopy is not being used" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Cached value indicated that native-diff is usable" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Initializing event backend file" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" 
level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=info msg="Setting parallel job count to 7" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only 219b809a67b76f01c3e3881752bb67fcd036e270e6191f5d0378890e70e9ec14)" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=debug msg="Shutting down engines" Jan 04 11:32:03 managed-node2 /usr/bin/podman[25091]: time="2025-01-04T11:32:03-05:00" level=info msg="Received shutdown.Stop(), terminating!" 
PID=25091 Jan 04 11:32:13 managed-node2 podman[25079]: time="2025-01-04T11:32:13-05:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd1-httpd1 in 10 seconds, resorting to SIGKILL" Jan 04 11:32:13 managed-node2 conmon[24521]: conmon a6beb024b5d86fca9df6 : container 24523 exited with status 137 Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079)" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=info msg="Using sqlite as database backend" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="systemd-logind: Unknown object '/'." Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Using graph driver overlay" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Using run root /run/user/3001/containers" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Using transient store: false" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Cached value indicated that metacopy is not being used" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Cached value indicated that native-diff is usable" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="backingFs=xfs, 
projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Initializing event backend file" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=info msg="Setting parallel job count to 7" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079)" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=debug msg="Shutting down engines" Jan 04 11:32:13 managed-node2 /usr/bin/podman[25108]: time="2025-01-04T11:32:13-05:00" level=info msg="Received shutdown.Stop(), terminating!" 
PID=25108 Jan 04 11:32:13 managed-node2 systemd[23226]: Stopped libpod-conmon-a6beb024b5d86fca9df6b92689fd9c71211581058e68de7c2da0969a8958d079.scope. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 85 and the job result is done. Jan 04 11:32:13 managed-node2 systemd[23226]: Removed slice user-libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e.slice - cgroup user-libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e.slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 84 and the job result is done. Jan 04 11:32:13 managed-node2 systemd[23226]: user-libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e.slice: Failed to open /run/user/3001/systemd/transient/user-libpod_pod_4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e.slice: No such file or directory Jan 04 11:32:13 managed-node2 podman[25079]: Pods stopped: Jan 04 11:32:13 managed-node2 podman[25079]: 4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e Jan 04 11:32:13 managed-node2 podman[25079]: Pods removed: Jan 04 11:32:13 managed-node2 podman[25079]: 4066b38bb5e83adbc88c3e20a312fd424c758190dad9b82bd2b30368f7cb948e Jan 04 11:32:13 managed-node2 podman[25079]: Secrets removed: Jan 04 11:32:13 managed-node2 podman[25079]: Volumes removed: Jan 04 11:32:13 managed-node2 systemd[23226]: Created slice user-libpod_pod_fe903ad2ec2383adc1a4b72155ae3452d6d7505e7c9633f4ca79e65d6dc06b07.slice - cgroup user-libpod_pod_fe903ad2ec2383adc1a4b72155ae3452d6d7505e7c9633f4ca79e65d6dc06b07.slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 86. Jan 04 11:32:14 managed-node2 systemd[23226]: Started libpod-bf4229278cbf719d6d75f11751a80a284dd3256cefd415e800ab5f5cdc883e59.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 90. Jan 04 11:32:14 managed-node2 systemd[23226]: Started rootless-netns-4d92c901.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 94. Jan 04 11:32:14 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 04 11:32:14 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 04 11:32:14 managed-node2 kernel: veth0: entered allmulticast mode Jan 04 11:32:14 managed-node2 kernel: veth0: entered promiscuous mode Jan 04 11:32:14 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 04 11:32:14 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Jan 04 11:32:14 managed-node2 systemd[23226]: Started run-rf8300fa8dbe44c88a447b2bb61cb38c6.scope - /usr/libexec/podman/aardvark-dns --config /run/user/3001/containers/networks/aardvark-dns -p 53 run. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 98. Jan 04 11:32:14 managed-node2 systemd[23226]: Started libpod-2d741e0e82710b800fa6f0694418630efa8b8cb2b0ee333b2427e20d818729bb.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 102. Jan 04 11:32:14 managed-node2 systemd[23226]: Started libpod-af7f2944597961c213714ce5393fe4d7a79783462defc3e71f2197e0c64d6129.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 107. Jan 04 11:32:14 managed-node2 podman[25079]: Pod: Jan 04 11:32:14 managed-node2 podman[25079]: fe903ad2ec2383adc1a4b72155ae3452d6d7505e7c9633f4ca79e65d6dc06b07 Jan 04 11:32:14 managed-node2 podman[25079]: Container: Jan 04 11:32:14 managed-node2 podman[25079]: af7f2944597961c213714ce5393fe4d7a79783462defc3e71f2197e0c64d6129 Jan 04 11:32:14 managed-node2 systemd[23226]: Started podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 71. Jan 04 11:32:14 managed-node2 sudo[25073]: pam_unix(sudo:session): session closed for user podman_basic_user Jan 04 11:32:15 managed-node2 python3.12[25298]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jan 04 11:32:15 managed-node2 python3.12[25430]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:32:17 managed-node2 python3.12[25563]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:32:18 managed-node2 python3.12[25695]: ansible-file Invoked with path=/tmp/lsr_83t3r8ir_podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:32:19 managed-node2 python3.12[25826]: ansible-file Invoked with path=/tmp/lsr_83t3r8ir_podman/httpd2-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:32:19 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 04 11:32:19 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 04 11:32:20 managed-node2 podman[25988]: 2025-01-04 11:32:20.623434175 -0500 EST m=+0.771239383 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 04 11:32:20 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 04 11:32:21 managed-node2 python3.12[26134]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:32:21 managed-node2 python3.12[26265]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:32:21 managed-node2 python3.12[26396]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 04 11:32:22 managed-node2 python3.12[26501]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd2.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736008341.6730418-15415-21223707228100/.source.yml _original_basename=._e6nowhb follow=False checksum=d85a9207eebdee136a70d7cab8884e5a6c38780a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:32:22 managed-node2 python3.12[26632]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 04 11:32:22 managed-node2 systemd[1]: Created slice machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice - cgroup machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice. 
░░ Subject: A start job for unit machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice has finished successfully. ░░ ░░ The job identifier is 2057. Jan 04 11:32:22 managed-node2 podman[26639]: 2025-01-04 11:32:22.896568746 -0500 EST m=+0.069621115 container create 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19 (image=localhost/podman-pause:5.3.1-1733097600, name=ce4a4f29304b-infra, pod_id=ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a, io.buildah.version=1.38.0) Jan 04 11:32:22 managed-node2 podman[26639]: 2025-01-04 11:32:22.903197319 -0500 EST m=+0.076249656 pod create ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a (image=, name=httpd2) Jan 04 11:32:22 managed-node2 podman[26639]: 2025-01-04 11:32:22.931209649 -0500 EST m=+0.104261986 container create b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a, io.containers.autoupdate=registry, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, app=test) Jan 04 11:32:22 managed-node2 podman[26639]: 2025-01-04 11:32:22.905669156 -0500 EST m=+0.078721561 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 04 11:32:22 managed-node2 NetworkManager[784]: [1736008342.9478] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/3) Jan 04 11:32:22 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 04 11:32:22 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 04 11:32:22 managed-node2 kernel: veth0: entered allmulticast mode Jan 04 11:32:22 managed-node2 kernel: veth0: entered promiscuous mode Jan 04 11:32:22 managed-node2 NetworkManager[784]: [1736008342.9582] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/4) Jan 04 11:32:22 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 04 11:32:22 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Jan 04 11:32:22 managed-node2 NetworkManager[784]: [1736008342.9616] device (veth0): carrier: link connected Jan 04 11:32:22 managed-node2 NetworkManager[784]: [1736008342.9620] device (podman1): carrier: link connected Jan 04 11:32:22 managed-node2 (udev-worker)[26654]: Network interface NamePolicy= disabled on kernel command line. Jan 04 11:32:22 managed-node2 (udev-worker)[26653]: Network interface NamePolicy= disabled on kernel command line. 
Jan 04 11:32:23 managed-node2 NetworkManager[784]: [1736008343.0105] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jan 04 11:32:23 managed-node2 NetworkManager[784]: [1736008343.0111] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jan 04 11:32:23 managed-node2 NetworkManager[784]: [1736008343.0164] device (podman1): Activation: starting connection 'podman1' (546e9f42-6299-437b-9679-89347f4cb263) Jan 04 11:32:23 managed-node2 NetworkManager[784]: [1736008343.0192] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jan 04 11:32:23 managed-node2 NetworkManager[784]: [1736008343.0195] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external') Jan 04 11:32:23 managed-node2 NetworkManager[784]: [1736008343.0196] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external') Jan 04 11:32:23 managed-node2 NetworkManager[784]: [1736008343.0200] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jan 04 11:32:23 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 2064. Jan 04 11:32:23 managed-node2 systemd[1]: Started run-r619f906cfc804e95b41ee657f03b0cc1.scope - /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit run-r619f906cfc804e95b41ee657f03b0cc1.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-r619f906cfc804e95b41ee657f03b0cc1.scope has finished successfully. ░░ ░░ The job identifier is 2143. Jan 04 11:32:23 managed-node2 aardvark-dns[26678]: starting aardvark on a child with pid 26683 Jan 04 11:32:23 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 2064. Jan 04 11:32:23 managed-node2 aardvark-dns[26683]: Successfully parsed config Jan 04 11:32:23 managed-node2 aardvark-dns[26683]: Listen v4 ip {"podman-default-kube-network": [10.89.0.1]} Jan 04 11:32:23 managed-node2 aardvark-dns[26683]: Listen v6 ip {} Jan 04 11:32:23 managed-node2 aardvark-dns[26683]: Using the following upstream servers: [10.29.169.13:53, 10.29.170.12:53, 10.2.32.1:53] Jan 04 11:32:23 managed-node2 NetworkManager[784]: [1736008343.0656] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jan 04 11:32:23 managed-node2 NetworkManager[784]: [1736008343.0665] device (podman1): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jan 04 11:32:23 managed-node2 NetworkManager[784]: [1736008343.0669] device (podman1): Activation: successful, device activated. 
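At this point the same kube-play pattern is repeated rootful: containers.podman.podman_play is invoked with kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml and state=started, which is what produces the machine-libpod_pod_*.slice cgroup, the podman1 bridge activation in NetworkManager, and the system-wide aardvark-dns instance seen above. A minimal sketch of that call, using only the parameters visible in the logged invocation (the task name is an assumption):

  # Play the root-owned httpd2 kube YAML; parameters mirror the logged module call.
  - name: Start the httpd2 kube workload as root
    containers.podman.podman_play:
      kube_file: /etc/containers/ansible-kubernetes.d/httpd2.yml
      state: started
      executable: podman
      debug: true
      log_level: debug

Under the hood the module runs /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml, which is echoed verbatim as the PODMAN-PLAY-KUBE command a few entries further down.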
Jan 04 11:32:23 managed-node2 systemd[1]: Started libpod-conmon-72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19.scope. ░░ Subject: A start job for unit libpod-conmon-72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19.scope has finished successfully. ░░ ░░ The job identifier is 2149. Jan 04 11:32:23 managed-node2 conmon[26694]: conmon 72a074f2c404d4bcdd6f : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/13/attach} Jan 04 11:32:23 managed-node2 conmon[26694]: conmon 72a074f2c404d4bcdd6f : terminal_ctrl_fd: 13 Jan 04 11:32:23 managed-node2 conmon[26694]: conmon 72a074f2c404d4bcdd6f : winsz read side: 17, winsz write side: 18 Jan 04 11:32:23 managed-node2 systemd[1]: Started libpod-72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19.scope - libcrun container. ░░ Subject: A start job for unit libpod-72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19.scope has finished successfully. ░░ ░░ The job identifier is 2156. Jan 04 11:32:23 managed-node2 conmon[26694]: conmon 72a074f2c404d4bcdd6f : container PID: 26696 Jan 04 11:32:23 managed-node2 podman[26639]: 2025-01-04 11:32:23.140855181 -0500 EST m=+0.313907674 container init 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19 (image=localhost/podman-pause:5.3.1-1733097600, name=ce4a4f29304b-infra, pod_id=ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a, io.buildah.version=1.38.0) Jan 04 11:32:23 managed-node2 podman[26639]: 2025-01-04 11:32:23.143340029 -0500 EST m=+0.316392511 container start 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19 (image=localhost/podman-pause:5.3.1-1733097600, name=ce4a4f29304b-infra, pod_id=ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a, io.buildah.version=1.38.0) Jan 04 11:32:23 managed-node2 systemd[1]: Started libpod-conmon-b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026.scope. ░░ Subject: A start job for unit libpod-conmon-b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026.scope has finished successfully. ░░ ░░ The job identifier is 2163. Jan 04 11:32:23 managed-node2 conmon[26700]: conmon b51a9b2b65af4d9c04e3 : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/12/attach} Jan 04 11:32:23 managed-node2 conmon[26700]: conmon b51a9b2b65af4d9c04e3 : terminal_ctrl_fd: 12 Jan 04 11:32:23 managed-node2 conmon[26700]: conmon b51a9b2b65af4d9c04e3 : winsz read side: 16, winsz write side: 17 Jan 04 11:32:23 managed-node2 systemd[1]: Started libpod-b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026.scope - libcrun container. ░░ Subject: A start job for unit libpod-b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026.scope has finished successfully. 
░░ ░░ The job identifier is 2170. Jan 04 11:32:23 managed-node2 conmon[26700]: conmon b51a9b2b65af4d9c04e3 : container PID: 26702 Jan 04 11:32:23 managed-node2 podman[26639]: 2025-01-04 11:32:23.200713794 -0500 EST m=+0.373766192 container init b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test) Jan 04 11:32:23 managed-node2 podman[26639]: 2025-01-04 11:32:23.203240701 -0500 EST m=+0.376293260 container start b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jan 04 11:32:23 managed-node2 podman[26639]: 2025-01-04 11:32:23.209148556 -0500 EST m=+0.382200935 pod start ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a (image=, name=httpd2) Jan 04 11:32:23 managed-node2 python3.12[26632]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml Jan 04 11:32:23 managed-node2 python3.12[26632]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod: ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a Container: b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026 Jan 04 11:32:23 managed-node2 python3.12[26632]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2025-01-04T11:32:22-05:00" level=info msg="/usr/bin/podman filtering at log level debug" time="2025-01-04T11:32:22-05:00" level=debug msg="Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2025-01-04T11:32:22-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2025-01-04T11:32:22-05:00" level=info msg="Using sqlite as database backend" time="2025-01-04T11:32:22-05:00" level=debug msg="Using graph driver overlay" time="2025-01-04T11:32:22-05:00" level=debug msg="Using graph root /var/lib/containers/storage" time="2025-01-04T11:32:22-05:00" level=debug msg="Using run root /run/containers/storage" time="2025-01-04T11:32:22-05:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" time="2025-01-04T11:32:22-05:00" level=debug msg="Using tmp dir /run/libpod" time="2025-01-04T11:32:22-05:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" time="2025-01-04T11:32:22-05:00" level=debug msg="Using transient store: false" time="2025-01-04T11:32:22-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-01-04T11:32:22-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-01-04T11:32:22-05:00" level=debug msg="Cached value indicated that metacopy is being used" time="2025-01-04T11:32:22-05:00" level=debug msg="Cached value indicated that native-diff is not being used" time="2025-01-04T11:32:22-05:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for 
building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" time="2025-01-04T11:32:22-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" time="2025-01-04T11:32:22-05:00" level=debug msg="Initializing event backend journald" time="2025-01-04T11:32:22-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" time="2025-01-04T11:32:22-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2025-01-04T11:32:22-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2025-01-04T11:32:22-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2025-01-04T11:32:22-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2025-01-04T11:32:22-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2025-01-04T11:32:22-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" time="2025-01-04T11:32:22-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2025-01-04T11:32:22-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2025-01-04T11:32:22-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" time="2025-01-04T11:32:22-05:00" level=info msg="Setting parallel job count to 7" time="2025-01-04T11:32:22-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network abeb9dad79b870d27d9a2e0372462704dea67bd90414ac62e777bb7e3f0f98fc bridge podman1 2025-01-04 11:30:09.084162695 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2025-01-04T11:32:22-05:00" level=debug msg="Successfully loaded 2 networks" time="2025-01-04T11:32:22-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-04T11:32:22-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-04T11:32:22-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." 
time="2025-01-04T11:32:22-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@c95b7a4fdf055ab0d873b648e6a8df27e25d358f3b00d002652aee04d59f7994\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-04T11:32:22-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@c95b7a4fdf055ab0d873b648e6a8df27e25d358f3b00d002652aee04d59f7994)" time="2025-01-04T11:32:22-05:00" level=debug msg="exporting opaque data as blob \"sha256:c95b7a4fdf055ab0d873b648e6a8df27e25d358f3b00d002652aee04d59f7994\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Pod using bridge network mode" time="2025-01-04T11:32:22-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice for parent machine.slice and name libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a" time="2025-01-04T11:32:22-05:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice" time="2025-01-04T11:32:22-05:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice" time="2025-01-04T11:32:22-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-04T11:32:22-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-04T11:32:22-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." 
time="2025-01-04T11:32:22-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@c95b7a4fdf055ab0d873b648e6a8df27e25d358f3b00d002652aee04d59f7994\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-04T11:32:22-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@c95b7a4fdf055ab0d873b648e6a8df27e25d358f3b00d002652aee04d59f7994)" time="2025-01-04T11:32:22-05:00" level=debug msg="exporting opaque data as blob \"sha256:c95b7a4fdf055ab0d873b648e6a8df27e25d358f3b00d002652aee04d59f7994\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Inspecting image c95b7a4fdf055ab0d873b648e6a8df27e25d358f3b00d002652aee04d59f7994" time="2025-01-04T11:32:22-05:00" level=debug msg="exporting opaque data as blob \"sha256:c95b7a4fdf055ab0d873b648e6a8df27e25d358f3b00d002652aee04d59f7994\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Inspecting image c95b7a4fdf055ab0d873b648e6a8df27e25d358f3b00d002652aee04d59f7994" time="2025-01-04T11:32:22-05:00" level=debug msg="Inspecting image c95b7a4fdf055ab0d873b648e6a8df27e25d358f3b00d002652aee04d59f7994" time="2025-01-04T11:32:22-05:00" level=debug msg="using systemd mode: false" time="2025-01-04T11:32:22-05:00" level=debug msg="setting container name ce4a4f29304b-infra" time="2025-01-04T11:32:22-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Allocated lock 1 for container 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19" time="2025-01-04T11:32:22-05:00" level=debug msg="exporting opaque data as blob \"sha256:c95b7a4fdf055ab0d873b648e6a8df27e25d358f3b00d002652aee04d59f7994\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are supported" time="2025-01-04T11:32:22-05:00" level=debug msg="Created container \"72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Container \"72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19\" has work directory \"/var/lib/containers/storage/overlay-containers/72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19/userdata\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Container \"72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19\" has run directory \"/run/containers/storage/overlay-containers/72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19/userdata\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-04T11:32:22-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-04T11:32:22-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-01-04T11:32:22-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-04T11:32:22-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-04T11:32:22-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2025-01-04T11:32:22-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-04T11:32:22-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-04T11:32:22-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-01-04T11:32:22-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-04T11:32:22-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-04T11:32:22-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-04T11:32:22-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-04T11:32:22-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-01-04T11:32:22-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-04T11:32:22-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-04T11:32:22-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-04T11:32:22-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:22-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-04T11:32:22-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-04T11:32:22-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-01-04T11:32:22-05:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-04T11:32:22-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-04T11:32:22-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-04T11:32:22-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:22-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-04T11:32:22-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-04T11:32:22-05:00" level=debug msg="using systemd mode: false" time="2025-01-04T11:32:22-05:00" level=debug msg="adding container to pod httpd2" time="2025-01-04T11:32:22-05:00" level=debug msg="setting container name httpd2-httpd2" 
time="2025-01-04T11:32:22-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-01-04T11:32:22-05:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2025-01-04T11:32:22-05:00" level=debug msg="Adding mount /proc" time="2025-01-04T11:32:22-05:00" level=debug msg="Adding mount /dev" time="2025-01-04T11:32:22-05:00" level=debug msg="Adding mount /dev/pts" time="2025-01-04T11:32:22-05:00" level=debug msg="Adding mount /dev/mqueue" time="2025-01-04T11:32:22-05:00" level=debug msg="Adding mount /sys" time="2025-01-04T11:32:22-05:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2025-01-04T11:32:22-05:00" level=debug msg="Allocated lock 2 for container b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026" time="2025-01-04T11:32:22-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Created container \"b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Container \"b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026\" has work directory \"/var/lib/containers/storage/overlay-containers/b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026/userdata\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Container \"b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026\" has run directory \"/run/containers/storage/overlay-containers/b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026/userdata\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Strongconnecting node 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19" time="2025-01-04T11:32:22-05:00" level=debug msg="Pushed 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19 onto stack" time="2025-01-04T11:32:22-05:00" level=debug msg="Finishing node 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19. Popped 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19 off stack" time="2025-01-04T11:32:22-05:00" level=debug msg="Strongconnecting node b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026" time="2025-01-04T11:32:22-05:00" level=debug msg="Pushed b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026 onto stack" time="2025-01-04T11:32:22-05:00" level=debug msg="Finishing node b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026. 
Popped b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026 off stack" time="2025-01-04T11:32:22-05:00" level=debug msg="overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/TQOTJL77U4SKGQOWPJFZ5KETQN,upperdir=/var/lib/containers/storage/overlay/04acd098cd25fb66ec04f609cc7975a552b6156d7496ae563fab2f0431094053/diff,workdir=/var/lib/containers/storage/overlay/04acd098cd25fb66ec04f609cc7975a552b6156d7496ae563fab2f0431094053/work,nodev,metacopy=on,context=\"system_u:object_r:container_file_t:s0:c285,c966\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Mounted container \"72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19\" at \"/var/lib/containers/storage/overlay/04acd098cd25fb66ec04f609cc7975a552b6156d7496ae563fab2f0431094053/merged\"" time="2025-01-04T11:32:22-05:00" level=debug msg="Created root filesystem for container 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19 at /var/lib/containers/storage/overlay/04acd098cd25fb66ec04f609cc7975a552b6156d7496ae563fab2f0431094053/merged" time="2025-01-04T11:32:22-05:00" level=debug msg="Made network namespace at /run/netns/netns-a4cf92d5-1e89-ffca-31b5-4f80cdedf748 for container 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19" [DEBUG netavark::network::validation] Validating network namespace... [DEBUG netavark::commands::setup] Setting up... [INFO netavark::firewall] Using nftables firewall driver [DEBUG netavark::network::bridge] Setup network podman-default-kube-network [DEBUG netavark::network::bridge] Container interface name: eth0 with IP addresses [10.89.0.2/24] [DEBUG netavark::network::bridge] Bridge name: podman1 with IP addresses [10.89.0.1/24] [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.ip_forward to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/podman1/rp_filter to 2 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv6/conf/eth0/autoconf to 0 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/arp_notify to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/rp_filter to 2 [INFO netavark::network::netlink] Adding route (dest: 0.0.0.0/0 ,gw: 10.89.0.1, metric 100) [DEBUG netavark::firewall::firewalld] Adding firewalld rules for network 10.89.0.0/24 [DEBUG netavark::firewall::firewalld] Adding subnet 10.89.0.0/24 to zone trusted as source [INFO netavark::firewall::nft] Creating container chain nv_abeb9dad_10_89_0_0_nm24 [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.conf.podman1.route_localnet to 1 [DEBUG netavark::dns::aardvark] Spawning aardvark server [DEBUG netavark::dns::aardvark] start aardvark-dns: ["systemd-run", "-q", "--scope", "/usr/libexec/podman/aardvark-dns", "--config", "/run/containers/networks/aardvark-dns", "-p", "53", "run"] [DEBUG netavark::commands::setup] { "podman-default-kube-network": StatusBlock { dns_search_domains: Some( [ "dns.podman", ], ), dns_server_ips: Some( [ 10.89.0.1, ], ), interfaces: Some( { "eth0": NetInterface { mac_address: "6a:fb:ed:cc:d3:eb", subnets: Some( [ NetAddress { gateway: Some( 10.89.0.1, ), ipnet: 10.89.0.2/24, }, ], ), }, }, ), }, } [DEBUG netavark::commands::setup] Setup complete time="2025-01-04T11:32:23-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2025-01-04T11:32:23-05:00" level=debug msg="Setting Cgroups for container 
72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19 to machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice:libpod:72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19" time="2025-01-04T11:32:23-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-01-04T11:32:23-05:00" level=debug msg="Workdir \"/\" resolved to host path \"/var/lib/containers/storage/overlay/04acd098cd25fb66ec04f609cc7975a552b6156d7496ae563fab2f0431094053/merged\"" time="2025-01-04T11:32:23-05:00" level=debug msg="Created OCI spec for container 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19 at /var/lib/containers/storage/overlay-containers/72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19/userdata/config.json" time="2025-01-04T11:32:23-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice for parent machine.slice and name libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a" time="2025-01-04T11:32:23-05:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice" time="2025-01-04T11:32:23-05:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice" time="2025-01-04T11:32:23-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-01-04T11:32:23-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19 -u 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19/userdata -p /run/containers/storage/overlay-containers/72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19/userdata/pidfile -n ce4a4f29304b-infra --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19 --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup 
--exit-command-arg --stopped-only --exit-command-arg 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19]" time="2025-01-04T11:32:23-05:00" level=info msg="Running conmon under slice machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice and unitName libpod-conmon-72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19.scope" time="2025-01-04T11:32:23-05:00" level=debug msg="Received: 26696" time="2025-01-04T11:32:23-05:00" level=info msg="Got Conmon PID as 26694" time="2025-01-04T11:32:23-05:00" level=debug msg="Created container 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19 in OCI runtime" time="2025-01-04T11:32:23-05:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2025-01-04T11:32:23-05:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2025-01-04T11:32:23-05:00" level=debug msg="Starting container 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19 with command [/catatonit -P]" time="2025-01-04T11:32:23-05:00" level=debug msg="Started container 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19" time="2025-01-04T11:32:23-05:00" level=debug msg="overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/QUTKPFALPWKAMIPWEKEV6F457D,upperdir=/var/lib/containers/storage/overlay/284396ee3df2b9d4d6a5f1cba6ed473c8f134be852dee9a535fc7b15dcb509bf/diff,workdir=/var/lib/containers/storage/overlay/284396ee3df2b9d4d6a5f1cba6ed473c8f134be852dee9a535fc7b15dcb509bf/work,nodev,metacopy=on,context=\"system_u:object_r:container_file_t:s0:c285,c966\"" time="2025-01-04T11:32:23-05:00" level=debug msg="Mounted container \"b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026\" at \"/var/lib/containers/storage/overlay/284396ee3df2b9d4d6a5f1cba6ed473c8f134be852dee9a535fc7b15dcb509bf/merged\"" time="2025-01-04T11:32:23-05:00" level=debug msg="Created root filesystem for container b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026 at /var/lib/containers/storage/overlay/284396ee3df2b9d4d6a5f1cba6ed473c8f134be852dee9a535fc7b15dcb509bf/merged" time="2025-01-04T11:32:23-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2025-01-04T11:32:23-05:00" level=debug msg="Setting Cgroups for container b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026 to machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice:libpod:b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026" time="2025-01-04T11:32:23-05:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-01-04T11:32:23-05:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount" time="2025-01-04T11:32:23-05:00" level=debug msg="Created OCI spec for container b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026 at /var/lib/containers/storage/overlay-containers/b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026/userdata/config.json" time="2025-01-04T11:32:23-05:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice for parent machine.slice and name libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a" time="2025-01-04T11:32:23-05:00" level=debug msg="Created cgroup 
machine.slice/machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice" time="2025-01-04T11:32:23-05:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice" time="2025-01-04T11:32:23-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-01-04T11:32:23-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026 -u b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026/userdata -p /run/containers/storage/overlay-containers/b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026/userdata/pidfile -n httpd2-httpd2 --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026 --full-attach -s -l k8s-file:/var/lib/containers/storage/overlay-containers/b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026]" time="2025-01-04T11:32:23-05:00" level=info msg="Running conmon under slice machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice and unitName libpod-conmon-b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026.scope" time="2025-01-04T11:32:23-05:00" level=debug msg="Received: 26702" time="2025-01-04T11:32:23-05:00" level=info msg="Got Conmon PID as 26700" time="2025-01-04T11:32:23-05:00" level=debug msg="Created container b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026 in OCI runtime" time="2025-01-04T11:32:23-05:00" level=debug msg="Starting container b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026 with command [/bin/busybox-extras httpd -f -p 80]" time="2025-01-04T11:32:23-05:00" level=debug msg="Started container b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026" time="2025-01-04T11:32:23-05:00" level=debug msg="Called kube.PersistentPostRunE(/usr/bin/podman play kube --start=true --log-level=debug 
/etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2025-01-04T11:32:23-05:00" level=debug msg="Shutting down engines" time="2025-01-04T11:32:23-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=26639 Jan 04 11:32:23 managed-node2 python3.12[26632]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Jan 04 11:32:23 managed-node2 python3.12[26834]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 04 11:32:23 managed-node2 systemd[1]: Reload requested from client PID 26835 ('systemctl') (unit session-5.scope)... Jan 04 11:32:23 managed-node2 systemd[1]: Reloading... Jan 04 11:32:23 managed-node2 systemd[1]: Reloading finished in 204 ms. Jan 04 11:32:24 managed-node2 python3.12[27023]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Jan 04 11:32:24 managed-node2 systemd[1]: Reload requested from client PID 27026 ('systemctl') (unit session-5.scope)... Jan 04 11:32:24 managed-node2 systemd[1]: Reloading... Jan 04 11:32:24 managed-node2 systemd[1]: Reloading finished in 198 ms. Jan 04 11:32:25 managed-node2 python3.12[27213]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 04 11:32:25 managed-node2 systemd[1]: Created slice system-podman\x2dkube.slice - Slice /system/podman-kube. ░░ Subject: A start job for unit system-podman\x2dkube.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit system-podman\x2dkube.slice has finished successfully. ░░ ░░ The job identifier is 2255. Jan 04 11:32:25 managed-node2 systemd[1]: Starting podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun execution. ░░ ░░ The job identifier is 2177. Jan 04 11:32:25 managed-node2 podman[27217]: 2025-01-04 11:32:25.50935022 -0500 EST m=+0.024088006 pod stop ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a (image=, name=httpd2) Jan 04 11:32:25 managed-node2 systemd[1]: libpod-72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19.scope has successfully entered the 'dead' state. 
Jan 04 11:32:25 managed-node2 podman[27217]: 2025-01-04 11:32:25.539790672 -0500 EST m=+0.054528283 container died 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19 (image=localhost/podman-pause:5.3.1-1733097600, name=ce4a4f29304b-infra, io.buildah.version=1.38.0) Jan 04 11:32:25 managed-node2 aardvark-dns[26683]: Received SIGHUP Jan 04 11:32:25 managed-node2 aardvark-dns[26683]: Successfully parsed config Jan 04 11:32:25 managed-node2 aardvark-dns[26683]: Listen v4 ip {} Jan 04 11:32:25 managed-node2 aardvark-dns[26683]: Listen v6 ip {} Jan 04 11:32:25 managed-node2 aardvark-dns[26683]: No configuration found stopping the sever Jan 04 11:32:25 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 04 11:32:25 managed-node2 systemd[1]: run-r619f906cfc804e95b41ee657f03b0cc1.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-r619f906cfc804e95b41ee657f03b0cc1.scope has successfully entered the 'dead' state. Jan 04 11:32:25 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Jan 04 11:32:25 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Jan 04 11:32:25 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19)" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=info msg="Using sqlite as database backend" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Using graph driver overlay" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Using graph root /var/lib/containers/storage" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Using run root /run/containers/storage" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Using tmp dir /run/libpod" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Using transient store: false" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jan 04 11:32:25 
managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Cached value indicated that metacopy is being used" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Cached value indicated that native-diff is not being used" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Initializing event backend journald" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=info msg="Setting parallel job count to 7" Jan 04 11:32:25 managed-node2 NetworkManager[784]: [1736008345.5771] device 
(podman1): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jan 04 11:32:25 managed-node2 systemd[1]: run-netns-netns\x2da4cf92d5\x2d1e89\x2dffca\x2d31b5\x2d4f80cdedf748.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2da4cf92d5\x2d1e89\x2dffca\x2d31b5\x2d4f80cdedf748.mount has successfully entered the 'dead' state. Jan 04 11:32:25 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19-userdata-shm.mount has successfully entered the 'dead' state. Jan 04 11:32:25 managed-node2 systemd[1]: var-lib-containers-storage-overlay-04acd098cd25fb66ec04f609cc7975a552b6156d7496ae563fab2f0431094053-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-04acd098cd25fb66ec04f609cc7975a552b6156d7496ae563fab2f0431094053-merged.mount has successfully entered the 'dead' state. Jan 04 11:32:25 managed-node2 podman[27217]: 2025-01-04 11:32:25.655948729 -0500 EST m=+0.170686278 container cleanup 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19 (image=localhost/podman-pause:5.3.1-1733097600, name=ce4a4f29304b-infra, pod_id=ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a, io.buildah.version=1.38.0) Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19)" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=debug msg="Shutting down engines" Jan 04 11:32:25 managed-node2 /usr/bin/podman[27229]: time="2025-01-04T11:32:25-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=27229 Jan 04 11:32:25 managed-node2 systemd[1]: libpod-conmon-72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19.scope has successfully entered the 'dead' state. Jan 04 11:32:27 managed-node2 systemd[4487]: Created slice background.slice - User Background Tasks Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 14. Jan 04 11:32:27 managed-node2 systemd[4487]: Starting systemd-tmpfiles-clean.service - Cleanup of User's Temporary Files and Directories... 
░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 13. Jan 04 11:32:27 managed-node2 systemd[4487]: Finished systemd-tmpfiles-clean.service - Cleanup of User's Temporary Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. Jan 04 11:32:35 managed-node2 podman[27217]: time="2025-01-04T11:32:35-05:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd2-httpd2 in 10 seconds, resorting to SIGKILL" Jan 04 11:32:35 managed-node2 conmon[26700]: conmon b51a9b2b65af4d9c04e3 : container 26702 exited with status 137 Jan 04 11:32:35 managed-node2 systemd[1]: libpod-b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026.scope has successfully entered the 'dead' state. Jan 04 11:32:35 managed-node2 podman[27217]: 2025-01-04 11:32:35.538086285 -0500 EST m=+10.052824129 container died b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026)" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=info msg="Using sqlite as database backend" Jan 04 11:32:35 managed-node2 systemd[1]: var-lib-containers-storage-overlay-284396ee3df2b9d4d6a5f1cba6ed473c8f134be852dee9a535fc7b15dcb509bf-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-284396ee3df2b9d4d6a5f1cba6ed473c8f134be852dee9a535fc7b15dcb509bf-merged.mount has successfully entered the 'dead' state. 
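The warning above shows that httpd2-httpd2 ignored SIGTERM and was killed with SIGKILL after podman's default 10-second stop timeout, which is why conmon reports exit status 137 (128 + 9). If a different grace period were wanted, recent podman versions map terminationGracePeriodSeconds from the kube manifest to the container stop timeout; a hedged sketch, assuming the installed podman honors that field:

apiVersion: v1
kind: Pod
metadata:
  name: httpd2
spec:
  terminationGracePeriodSeconds: 30   # assumption: widens the SIGTERM-to-SIGKILL window from the 10s default
  containers:
    - name: httpd2
      image: quay.io/libpod/testimage:20210610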
Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Using graph driver overlay" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Using graph root /var/lib/containers/storage" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Using run root /run/containers/storage" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Using tmp dir /run/libpod" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Using transient store: false" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Cached value indicated that metacopy is being used" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Cached value indicated that native-diff is not being used" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Initializing event backend journald" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: 
time="2025-01-04T11:32:35-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=info msg="Setting parallel job count to 7" Jan 04 11:32:35 managed-node2 podman[27217]: 2025-01-04 11:32:35.576277326 -0500 EST m=+10.091014877 container cleanup b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=info msg="Received shutdown signal \"terminated\", terminating!" PID=27253 Jan 04 11:32:35 managed-node2 systemd[1]: Stopping libpod-conmon-b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026.scope... ░░ Subject: A stop job for unit libpod-conmon-b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026.scope has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026.scope has begun execution. ░░ ░░ The job identifier is 2263. Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=info msg="Invoking shutdown handler \"libpod\"" PID=27253 Jan 04 11:32:35 managed-node2 /usr/bin/podman[27253]: time="2025-01-04T11:32:35-05:00" level=debug msg="Completed shutdown handler \"libpod\", duration 0s" PID=27253 Jan 04 11:32:35 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 04 11:32:35 managed-node2 systemd[1]: libpod-conmon-b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026.scope has successfully entered the 'dead' state. Jan 04 11:32:35 managed-node2 systemd[1]: Stopped libpod-conmon-b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026.scope. 
░░ Subject: A stop job for unit libpod-conmon-b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026.scope has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026.scope has finished. ░░ ░░ The job identifier is 2263 and the job result is done. Jan 04 11:32:35 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Jan 04 11:32:35 managed-node2 systemd[1]: Removed slice machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice - cgroup machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice. ░░ Subject: A stop job for unit machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice has finished. ░░ ░░ The job identifier is 2262 and the job result is done. Jan 04 11:32:35 managed-node2 podman[27217]: 2025-01-04 11:32:35.6383497 -0500 EST m=+10.153087243 container remove b51a9b2b65af4d9c04e3a246d3416ebfb67c1f9a0f1c721b0eb97f82ae918026 (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jan 04 11:32:35 managed-node2 podman[27217]: 2025-01-04 11:32:35.665355307 -0500 EST m=+10.180092861 container remove 72a074f2c404d4bcdd6f990ac60668e02566b622e4e281669d48ae10434d4b19 (image=localhost/podman-pause:5.3.1-1733097600, name=ce4a4f29304b-infra, pod_id=ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a, io.buildah.version=1.38.0) Jan 04 11:32:35 managed-node2 systemd[1]: machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice: Failed to open /run/systemd/transient/machine-libpod_pod_ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a.slice: No such file or directory Jan 04 11:32:35 managed-node2 podman[27217]: 2025-01-04 11:32:35.674688605 -0500 EST m=+10.189426110 pod remove ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a (image=, name=httpd2) Jan 04 11:32:35 managed-node2 podman[27217]: Pods stopped: Jan 04 11:32:35 managed-node2 podman[27217]: ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a Jan 04 11:32:35 managed-node2 podman[27217]: Pods removed: Jan 04 11:32:35 managed-node2 podman[27217]: ce4a4f29304bb0660fe3d2d15416888d2e7455ef87926002b94a52a0c3df9f2a Jan 04 11:32:35 managed-node2 podman[27217]: Secrets removed: Jan 04 11:32:35 managed-node2 podman[27217]: Volumes removed: Jan 04 11:32:35 managed-node2 podman[27217]: 2025-01-04 11:32:35.699007794 -0500 EST m=+10.213745409 container create 23724cd2b98cf70cd15894b6500ab24bc5f44b0ed864a28ddbed6859c312ed76 (image=localhost/podman-pause:5.3.1-1733097600, name=c8c001b59877-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jan 04 11:32:35 managed-node2 systemd[1]: Created slice 
machine-libpod_pod_ecaac7b287a8d6f741bca1619f477c32162bd62f52c5b5e3212d4c371a417e5f.slice - cgroup machine-libpod_pod_ecaac7b287a8d6f741bca1619f477c32162bd62f52c5b5e3212d4c371a417e5f.slice. ░░ Subject: A start job for unit machine-libpod_pod_ecaac7b287a8d6f741bca1619f477c32162bd62f52c5b5e3212d4c371a417e5f.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_ecaac7b287a8d6f741bca1619f477c32162bd62f52c5b5e3212d4c371a417e5f.slice has finished successfully. ░░ ░░ The job identifier is 2264. Jan 04 11:32:35 managed-node2 podman[27217]: 2025-01-04 11:32:35.743858656 -0500 EST m=+10.258596283 container create dbc2b9607a4ca55a5a7069f06017b2c67ca7a59d57bf39bfedfb115ed16ec91f (image=localhost/podman-pause:5.3.1-1733097600, name=ecaac7b287a8-infra, pod_id=ecaac7b287a8d6f741bca1619f477c32162bd62f52c5b5e3212d4c371a417e5f, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jan 04 11:32:35 managed-node2 podman[27217]: 2025-01-04 11:32:35.749872146 -0500 EST m=+10.264609684 pod create ecaac7b287a8d6f741bca1619f477c32162bd62f52c5b5e3212d4c371a417e5f (image=, name=httpd2) Jan 04 11:32:35 managed-node2 podman[27217]: 2025-01-04 11:32:35.751842762 -0500 EST m=+10.266580444 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 04 11:32:35 managed-node2 podman[27217]: 2025-01-04 11:32:35.774790691 -0500 EST m=+10.289528239 container create f27f043c4df71017c1bd967698eecbb37685099825c3c94f42622aa2493af37f (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=ecaac7b287a8d6f741bca1619f477c32162bd62f52c5b5e3212d4c371a417e5f, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, app=test) Jan 04 11:32:35 managed-node2 podman[27217]: 2025-01-04 11:32:35.775085864 -0500 EST m=+10.289823419 container restart 23724cd2b98cf70cd15894b6500ab24bc5f44b0ed864a28ddbed6859c312ed76 (image=localhost/podman-pause:5.3.1-1733097600, name=c8c001b59877-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jan 04 11:32:35 managed-node2 systemd[1]: Started libpod-23724cd2b98cf70cd15894b6500ab24bc5f44b0ed864a28ddbed6859c312ed76.scope - libcrun container. ░░ Subject: A start job for unit libpod-23724cd2b98cf70cd15894b6500ab24bc5f44b0ed864a28ddbed6859c312ed76.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-23724cd2b98cf70cd15894b6500ab24bc5f44b0ed864a28ddbed6859c312ed76.scope has finished successfully. ░░ ░░ The job identifier is 2270. 
Jan 04 11:32:35 managed-node2 podman[27217]: 2025-01-04 11:32:35.845833248 -0500 EST m=+10.360570912 container init 23724cd2b98cf70cd15894b6500ab24bc5f44b0ed864a28ddbed6859c312ed76 (image=localhost/podman-pause:5.3.1-1733097600, name=c8c001b59877-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jan 04 11:32:35 managed-node2 podman[27217]: 2025-01-04 11:32:35.848320069 -0500 EST m=+10.363057650 container start 23724cd2b98cf70cd15894b6500ab24bc5f44b0ed864a28ddbed6859c312ed76 (image=localhost/podman-pause:5.3.1-1733097600, name=c8c001b59877-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jan 04 11:32:35 managed-node2 NetworkManager[784]: [1736008355.8609] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/5) Jan 04 11:32:35 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 04 11:32:35 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 04 11:32:35 managed-node2 kernel: veth0: entered allmulticast mode Jan 04 11:32:35 managed-node2 kernel: veth0: entered promiscuous mode Jan 04 11:32:35 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 04 11:32:35 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Jan 04 11:32:35 managed-node2 NetworkManager[784]: [1736008355.8728] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/6) Jan 04 11:32:35 managed-node2 NetworkManager[784]: [1736008355.8736] device (veth0): carrier: link connected Jan 04 11:32:35 managed-node2 NetworkManager[784]: [1736008355.8741] device (podman1): carrier: link connected Jan 04 11:32:35 managed-node2 (udev-worker)[27272]: Network interface NamePolicy= disabled on kernel command line. Jan 04 11:32:35 managed-node2 (udev-worker)[27273]: Network interface NamePolicy= disabled on kernel command line. Jan 04 11:32:35 managed-node2 NetworkManager[784]: [1736008355.9377] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jan 04 11:32:35 managed-node2 NetworkManager[784]: [1736008355.9395] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jan 04 11:32:35 managed-node2 NetworkManager[784]: [1736008355.9453] device (podman1): Activation: starting connection 'podman1' (502e50cd-fa9a-4804-aa44-3de28f6105b8) Jan 04 11:32:35 managed-node2 NetworkManager[784]: [1736008355.9455] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jan 04 11:32:35 managed-node2 NetworkManager[784]: [1736008355.9457] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external') Jan 04 11:32:35 managed-node2 NetworkManager[784]: [1736008355.9459] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external') Jan 04 11:32:35 managed-node2 NetworkManager[784]: [1736008355.9461] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jan 04 11:32:35 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 2277. 
Jan 04 11:32:35 managed-node2 systemd[1]: Started run-r472bc244f245414093ac26f7e0451412.scope - /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit run-r472bc244f245414093ac26f7e0451412.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-r472bc244f245414093ac26f7e0451412.scope has finished successfully. ░░ ░░ The job identifier is 2356. Jan 04 11:32:35 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 2277. Jan 04 11:32:35 managed-node2 NetworkManager[784]: [1736008355.9931] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jan 04 11:32:35 managed-node2 NetworkManager[784]: [1736008355.9935] device (podman1): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jan 04 11:32:35 managed-node2 NetworkManager[784]: [1736008355.9943] device (podman1): Activation: successful, device activated. Jan 04 11:32:36 managed-node2 systemd[1]: Started libpod-dbc2b9607a4ca55a5a7069f06017b2c67ca7a59d57bf39bfedfb115ed16ec91f.scope - libcrun container. ░░ Subject: A start job for unit libpod-dbc2b9607a4ca55a5a7069f06017b2c67ca7a59d57bf39bfedfb115ed16ec91f.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-dbc2b9607a4ca55a5a7069f06017b2c67ca7a59d57bf39bfedfb115ed16ec91f.scope has finished successfully. ░░ ░░ The job identifier is 2362. Jan 04 11:32:36 managed-node2 podman[27217]: 2025-01-04 11:32:36.026319901 -0500 EST m=+10.541057567 container init dbc2b9607a4ca55a5a7069f06017b2c67ca7a59d57bf39bfedfb115ed16ec91f (image=localhost/podman-pause:5.3.1-1733097600, name=ecaac7b287a8-infra, pod_id=ecaac7b287a8d6f741bca1619f477c32162bd62f52c5b5e3212d4c371a417e5f, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, io.buildah.version=1.38.0) Jan 04 11:32:36 managed-node2 podman[27217]: 2025-01-04 11:32:36.028993156 -0500 EST m=+10.543730773 container start dbc2b9607a4ca55a5a7069f06017b2c67ca7a59d57bf39bfedfb115ed16ec91f (image=localhost/podman-pause:5.3.1-1733097600, name=ecaac7b287a8-infra, pod_id=ecaac7b287a8d6f741bca1619f477c32162bd62f52c5b5e3212d4c371a417e5f, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, io.buildah.version=1.38.0) Jan 04 11:32:36 managed-node2 systemd[1]: Started libpod-f27f043c4df71017c1bd967698eecbb37685099825c3c94f42622aa2493af37f.scope - libcrun container. ░░ Subject: A start job for unit libpod-f27f043c4df71017c1bd967698eecbb37685099825c3c94f42622aa2493af37f.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-f27f043c4df71017c1bd967698eecbb37685099825c3c94f42622aa2493af37f.scope has finished successfully. ░░ ░░ The job identifier is 2369. 
Jan 04 11:32:36 managed-node2 podman[27217]: 2025-01-04 11:32:36.073620242 -0500 EST m=+10.588357802 container init f27f043c4df71017c1bd967698eecbb37685099825c3c94f42622aa2493af37f (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=ecaac7b287a8d6f741bca1619f477c32162bd62f52c5b5e3212d4c371a417e5f, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jan 04 11:32:36 managed-node2 podman[27217]: 2025-01-04 11:32:36.07582633 -0500 EST m=+10.590563926 container start f27f043c4df71017c1bd967698eecbb37685099825c3c94f42622aa2493af37f (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=ecaac7b287a8d6f741bca1619f477c32162bd62f52c5b5e3212d4c371a417e5f, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jan 04 11:32:36 managed-node2 podman[27217]: 2025-01-04 11:32:36.081743026 -0500 EST m=+10.596480566 pod start ecaac7b287a8d6f741bca1619f477c32162bd62f52c5b5e3212d4c371a417e5f (image=, name=httpd2) Jan 04 11:32:36 managed-node2 podman[27217]: Pod: Jan 04 11:32:36 managed-node2 podman[27217]: ecaac7b287a8d6f741bca1619f477c32162bd62f52c5b5e3212d4c371a417e5f Jan 04 11:32:36 managed-node2 podman[27217]: Container: Jan 04 11:32:36 managed-node2 podman[27217]: f27f043c4df71017c1bd967698eecbb37685099825c3c94f42622aa2493af37f Jan 04 11:32:36 managed-node2 systemd[1]: Started podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished successfully. ░░ ░░ The job identifier is 2177. 
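The escaped unit name above, podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, is the podman-kube@.service template instantiated for /etc/containers/ansible-kubernetes.d/httpd2.yml. A minimal sketch of how that pattern is driven from Ansible, assuming a play targeting managed-node2 (task names and the play wrapper are illustrative; the module arguments mirror the systemd-escape and ansible-systemd invocations recorded in this run):

- hosts: managed-node2
  tasks:
    - name: Derive the templated unit name from the kube file path
      ansible.builtin.command:
        argv:
          - systemd-escape
          - --template
          - podman-kube@.service
          - /etc/containers/ansible-kubernetes.d/httpd2.yml
      register: __kube_unit
      changed_when: false

    - name: Enable and start the templated podman-kube unit
      ansible.builtin.systemd:
        name: "{{ __kube_unit.stdout }}"
        scope: system
        enabled: true
        state: started
        daemon_reload: true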
Jan 04 11:32:36 managed-node2 python3.12[27453]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:32:38 managed-node2 python3.12[27586]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:32:39 managed-node2 python3.12[27718]: ansible-file Invoked with path=/tmp/lsr_83t3r8ir_podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:32:39 managed-node2 python3.12[27849]: ansible-file Invoked with path=/tmp/lsr_83t3r8ir_podman/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:32:41 managed-node2 podman[28009]: 2025-01-04 11:32:41.355986126 -0500 EST m=+1.236275522 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 04 11:32:41 managed-node2 python3.12[28154]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:32:42 managed-node2 python3.12[28285]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:32:42 managed-node2 python3.12[28416]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 04 11:32:42 managed-node2 python3.12[28521]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd3.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736008362.3190482-16268-253131673991454/.source.yml _original_basename=.pwzt0a85 follow=False checksum=afa449bfcb99ed71cd6828d236f90b0be6428b4e backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:32:43 managed-node2 python3.12[28652]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None 
debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 04 11:32:43 managed-node2 systemd[1]: Created slice machine-libpod_pod_5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3.slice - cgroup machine-libpod_pod_5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3.slice. ░░ Subject: A start job for unit machine-libpod_pod_5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3.slice has finished successfully. ░░ ░░ The job identifier is 2376. Jan 04 11:32:43 managed-node2 podman[28659]: 2025-01-04 11:32:43.448073385 -0500 EST m=+0.051064137 container create 862b588c7994589af90685f4f16b9f9a8c9ecfec263c923420d1cc1f285d8873 (image=localhost/podman-pause:5.3.1-1733097600, name=5c8b39a78908-infra, pod_id=5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3, io.buildah.version=1.38.0) Jan 04 11:32:43 managed-node2 podman[28659]: 2025-01-04 11:32:43.451231682 -0500 EST m=+0.054222484 pod create 5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3 (image=, name=httpd3) Jan 04 11:32:43 managed-node2 podman[28659]: 2025-01-04 11:32:43.47299563 -0500 EST m=+0.075986392 container create f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3, io.buildah.version=1.21.0, app=test, io.containers.autoupdate=registry, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jan 04 11:32:43 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Jan 04 11:32:43 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Jan 04 11:32:43 managed-node2 kernel: veth1: entered allmulticast mode Jan 04 11:32:43 managed-node2 kernel: veth1: entered promiscuous mode Jan 04 11:32:43 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Jan 04 11:32:43 managed-node2 kernel: podman1: port 2(veth1) entered forwarding state Jan 04 11:32:43 managed-node2 NetworkManager[784]: [1736008363.5000] manager: (veth1): new Veth device (/org/freedesktop/NetworkManager/Devices/7) Jan 04 11:32:43 managed-node2 NetworkManager[784]: [1736008363.5015] device (veth1): carrier: link connected Jan 04 11:32:43 managed-node2 podman[28659]: 2025-01-04 11:32:43.453461035 -0500 EST m=+0.056452036 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 04 11:32:43 managed-node2 (udev-worker)[28676]: Network interface NamePolicy= disabled on kernel command line. Jan 04 11:32:43 managed-node2 systemd[1]: Started libpod-conmon-862b588c7994589af90685f4f16b9f9a8c9ecfec263c923420d1cc1f285d8873.scope. ░░ Subject: A start job for unit libpod-conmon-862b588c7994589af90685f4f16b9f9a8c9ecfec263c923420d1cc1f285d8873.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-862b588c7994589af90685f4f16b9f9a8c9ecfec263c923420d1cc1f285d8873.scope has finished successfully. ░░ ░░ The job identifier is 2383. Jan 04 11:32:43 managed-node2 systemd[1]: Started libpod-862b588c7994589af90685f4f16b9f9a8c9ecfec263c923420d1cc1f285d8873.scope - libcrun container. 
░░ Subject: A start job for unit libpod-862b588c7994589af90685f4f16b9f9a8c9ecfec263c923420d1cc1f285d8873.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-862b588c7994589af90685f4f16b9f9a8c9ecfec263c923420d1cc1f285d8873.scope has finished successfully. ░░ ░░ The job identifier is 2390. Jan 04 11:32:43 managed-node2 podman[28659]: 2025-01-04 11:32:43.602871064 -0500 EST m=+0.205861976 container init 862b588c7994589af90685f4f16b9f9a8c9ecfec263c923420d1cc1f285d8873 (image=localhost/podman-pause:5.3.1-1733097600, name=5c8b39a78908-infra, pod_id=5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3, io.buildah.version=1.38.0) Jan 04 11:32:43 managed-node2 podman[28659]: 2025-01-04 11:32:43.605779897 -0500 EST m=+0.208770719 container start 862b588c7994589af90685f4f16b9f9a8c9ecfec263c923420d1cc1f285d8873 (image=localhost/podman-pause:5.3.1-1733097600, name=5c8b39a78908-infra, pod_id=5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3, io.buildah.version=1.38.0) Jan 04 11:32:43 managed-node2 systemd[1]: Started libpod-conmon-f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5.scope. ░░ Subject: A start job for unit libpod-conmon-f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5.scope has finished successfully. ░░ ░░ The job identifier is 2397. Jan 04 11:32:43 managed-node2 systemd[1]: Started libpod-f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5.scope - libcrun container. ░░ Subject: A start job for unit libpod-f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5.scope has finished successfully. ░░ ░░ The job identifier is 2404. 
Jan 04 11:32:43 managed-node2 podman[28659]: 2025-01-04 11:32:43.654110202 -0500 EST m=+0.257101117 container init f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z) Jan 04 11:32:43 managed-node2 podman[28659]: 2025-01-04 11:32:43.656617666 -0500 EST m=+0.259608549 container start f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test) Jan 04 11:32:43 managed-node2 podman[28659]: 2025-01-04 11:32:43.659914457 -0500 EST m=+0.262905222 pod start 5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3 (image=, name=httpd3) Jan 04 11:32:44 managed-node2 python3.12[28837]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 04 11:32:44 managed-node2 systemd[1]: Reload requested from client PID 28838 ('systemctl') (unit session-5.scope)... Jan 04 11:32:44 managed-node2 systemd[1]: Reloading... Jan 04 11:32:44 managed-node2 systemd[1]: Reloading finished in 217 ms. Jan 04 11:32:45 managed-node2 python3.12[29024]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Jan 04 11:32:45 managed-node2 systemd[1]: Reload requested from client PID 29027 ('systemctl') (unit session-5.scope)... Jan 04 11:32:45 managed-node2 systemd[1]: Reloading... Jan 04 11:32:45 managed-node2 systemd[1]: Reloading finished in 214 ms. Jan 04 11:32:46 managed-node2 python3.12[29214]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 04 11:32:46 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Jan 04 11:32:46 managed-node2 systemd[1]: Starting podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun execution. ░░ ░░ The job identifier is 2411. 
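The pod that this unit is about to replace was created a few seconds earlier by the containers.podman.podman_play task logged above (state=started, kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml, executable=podman). A minimal sketch of that task, with the same assumed play wrapper as before and only the parameters visible in the log filled in:

- hosts: managed-node2
  tasks:
    - name: Play the kube file with podman
      containers.podman.podman_play:
        kube_file: /etc/containers/ansible-kubernetes.d/httpd3.yml
        executable: podman
        state: started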
Jan 04 11:32:46 managed-node2 podman[29219]: 2025-01-04 11:32:46.077838962 -0500 EST m=+0.023800799 pod stop 5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3 (image=, name=httpd3) Jan 04 11:32:46 managed-node2 podman[29219]: 2025-01-04 11:32:46.089786817 -0500 EST m=+0.035748685 container stop 862b588c7994589af90685f4f16b9f9a8c9ecfec263c923420d1cc1f285d8873 (image=localhost/podman-pause:5.3.1-1733097600, name=5c8b39a78908-infra, pod_id=5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3, io.buildah.version=1.38.0) Jan 04 11:32:46 managed-node2 systemd[1]: libpod-862b588c7994589af90685f4f16b9f9a8c9ecfec263c923420d1cc1f285d8873.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-862b588c7994589af90685f4f16b9f9a8c9ecfec263c923420d1cc1f285d8873.scope has successfully entered the 'dead' state. Jan 04 11:32:46 managed-node2 podman[29219]: 2025-01-04 11:32:46.096689652 -0500 EST m=+0.042651405 container died 862b588c7994589af90685f4f16b9f9a8c9ecfec263c923420d1cc1f285d8873 (image=localhost/podman-pause:5.3.1-1733097600, name=5c8b39a78908-infra, io.buildah.version=1.38.0) Jan 04 11:32:46 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Jan 04 11:32:46 managed-node2 kernel: veth1 (unregistering): left allmulticast mode Jan 04 11:32:46 managed-node2 kernel: veth1 (unregistering): left promiscuous mode Jan 04 11:32:46 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Jan 04 11:32:46 managed-node2 systemd[1]: run-netns-netns\x2d76f641b4\x2d14c6\x2d7f71\x2debb7\x2d7b4c05b4a12e.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2d76f641b4\x2d14c6\x2d7f71\x2debb7\x2d7b4c05b4a12e.mount has successfully entered the 'dead' state. Jan 04 11:32:46 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-862b588c7994589af90685f4f16b9f9a8c9ecfec263c923420d1cc1f285d8873-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-862b588c7994589af90685f4f16b9f9a8c9ecfec263c923420d1cc1f285d8873-userdata-shm.mount has successfully entered the 'dead' state. Jan 04 11:32:46 managed-node2 systemd[1]: var-lib-containers-storage-overlay-640391f965bcd034ea649da3273b1a8b45e0901c17f5d62b4a666838f5a073d8-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-640391f965bcd034ea649da3273b1a8b45e0901c17f5d62b4a666838f5a073d8-merged.mount has successfully entered the 'dead' state. Jan 04 11:32:46 managed-node2 podman[29219]: 2025-01-04 11:32:46.159251578 -0500 EST m=+0.105213282 container cleanup 862b588c7994589af90685f4f16b9f9a8c9ecfec263c923420d1cc1f285d8873 (image=localhost/podman-pause:5.3.1-1733097600, name=5c8b39a78908-infra, pod_id=5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3, io.buildah.version=1.38.0) Jan 04 11:32:46 managed-node2 systemd[1]: libpod-conmon-862b588c7994589af90685f4f16b9f9a8c9ecfec263c923420d1cc1f285d8873.scope: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-862b588c7994589af90685f4f16b9f9a8c9ecfec263c923420d1cc1f285d8873.scope has successfully entered the 'dead' state. Jan 04 11:32:56 managed-node2 podman[29219]: time="2025-01-04T11:32:56-05:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd3-httpd3 in 10 seconds, resorting to SIGKILL" Jan 04 11:32:56 managed-node2 systemd[1]: libpod-f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5.scope has successfully entered the 'dead' state. Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.10190978 -0500 EST m=+10.047871602 container died f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jan 04 11:32:56 managed-node2 systemd[1]: var-lib-containers-storage-overlay-1a58fa6cb4e9ad7e43e7ef37f8f31a0f90a4fd6ed89e272e0786932b8faf7aee-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-1a58fa6cb4e9ad7e43e7ef37f8f31a0f90a4fd6ed89e272e0786932b8faf7aee-merged.mount has successfully entered the 'dead' state. Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.127890519 -0500 EST m=+10.073852732 container cleanup f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jan 04 11:32:56 managed-node2 systemd[1]: Stopping libpod-conmon-f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5.scope... ░░ Subject: A stop job for unit libpod-conmon-f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5.scope has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5.scope has begun execution. ░░ ░░ The job identifier is 2497. Jan 04 11:32:56 managed-node2 systemd[1]: libpod-conmon-f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5.scope has successfully entered the 'dead' state. Jan 04 11:32:56 managed-node2 systemd[1]: Stopped libpod-conmon-f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5.scope. ░░ Subject: A stop job for unit libpod-conmon-f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5.scope has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5.scope has finished. ░░ ░░ The job identifier is 2497 and the job result is done. 
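The 10-second SIGTERM wait followed by SIGKILL above is podman's default stop timeout; the test image's httpd does not exit on SIGTERM. The actual httpd3.yml content is not shown in this log, so the following is only a generic illustration of how a kube YAML could shorten that grace period (field values, command and port are assumptions; terminationGracePeriodSeconds is a standard Kubernetes field that recent podman kube play, such as the 5.3.1 seen here, maps to the container stop timeout):

apiVersion: v1
kind: Pod
metadata:
  name: httpd3
spec:
  terminationGracePeriodSeconds: 2
  containers:
    - name: httpd3
      image: quay.io/libpod/testimage:20210610
      command: ["/bin/busybox-extras", "httpd", "-f", "-p", "80"]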
Jan 04 11:32:56 managed-node2 systemd[1]: Removed slice machine-libpod_pod_5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3.slice - cgroup machine-libpod_pod_5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3.slice. ░░ Subject: A stop job for unit machine-libpod_pod_5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3.slice has finished. ░░ ░░ The job identifier is 2496 and the job result is done. Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.13903142 -0500 EST m=+10.084993237 pod stop 5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3 (image=, name=httpd3) Jan 04 11:32:56 managed-node2 systemd[1]: machine-libpod_pod_5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3.slice: Failed to open /run/systemd/transient/machine-libpod_pod_5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3.slice: No such file or directory Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.143655673 -0500 EST m=+10.089617517 pod stop 5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3 (image=, name=httpd3) Jan 04 11:32:56 managed-node2 systemd[1]: machine-libpod_pod_5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3.slice: Failed to open /run/systemd/transient/machine-libpod_pod_5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3.slice: No such file or directory Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.162026263 -0500 EST m=+10.107988003 container remove f396a7a96dfb9538f1bf10ffcc16c81770f5bd655ffe0e86d37452754610c0d5 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.178862859 -0500 EST m=+10.124824592 container remove 862b588c7994589af90685f4f16b9f9a8c9ecfec263c923420d1cc1f285d8873 (image=localhost/podman-pause:5.3.1-1733097600, name=5c8b39a78908-infra, pod_id=5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3, io.buildah.version=1.38.0) Jan 04 11:32:56 managed-node2 systemd[1]: machine-libpod_pod_5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3.slice: Failed to open /run/systemd/transient/machine-libpod_pod_5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3.slice: No such file or directory Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.185623785 -0500 EST m=+10.131585491 pod remove 5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3 (image=, name=httpd3) Jan 04 11:32:56 managed-node2 podman[29219]: Pods stopped: Jan 04 11:32:56 managed-node2 podman[29219]: 5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3 Jan 04 11:32:56 managed-node2 podman[29219]: Pods removed: Jan 04 11:32:56 managed-node2 podman[29219]: 5c8b39a78908fd693b77e346f0a856f5b1ca333d142c8929d245ff1727af9cc3 Jan 04 11:32:56 managed-node2 podman[29219]: Secrets removed: Jan 04 11:32:56 managed-node2 podman[29219]: Volumes removed: Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.208822895 -0500 EST m=+10.154784618 container create 
ea65b4c142a1ecc4c5acde13aead0deeca5cc4c6b5b06f3925f8cc41f46c26ab (image=localhost/podman-pause:5.3.1-1733097600, name=8c5a350fe43d-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jan 04 11:32:56 managed-node2 systemd[1]: Created slice machine-libpod_pod_a93b7ede24ca41ba847ce3cf58cc729dffc746701987f6d92abdaf7b8833d80d.slice - cgroup machine-libpod_pod_a93b7ede24ca41ba847ce3cf58cc729dffc746701987f6d92abdaf7b8833d80d.slice. ░░ Subject: A start job for unit machine-libpod_pod_a93b7ede24ca41ba847ce3cf58cc729dffc746701987f6d92abdaf7b8833d80d.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_a93b7ede24ca41ba847ce3cf58cc729dffc746701987f6d92abdaf7b8833d80d.slice has finished successfully. ░░ ░░ The job identifier is 2498. Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.237772636 -0500 EST m=+10.183734449 container create 196161f20f831fa75ecc72ab533fa66e7dcf1074574fe2c3143d784148c1543a (image=localhost/podman-pause:5.3.1-1733097600, name=a93b7ede24ca-infra, pod_id=a93b7ede24ca41ba847ce3cf58cc729dffc746701987f6d92abdaf7b8833d80d, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, io.buildah.version=1.38.0) Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.241831508 -0500 EST m=+10.187793210 pod create a93b7ede24ca41ba847ce3cf58cc729dffc746701987f6d92abdaf7b8833d80d (image=, name=httpd3) Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.262804828 -0500 EST m=+10.208766641 container create cba46159e385d5b6de190f637934a5720bac746a6ef2ab1573d80109f1bbe7f1 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=a93b7ede24ca41ba847ce3cf58cc729dffc746701987f6d92abdaf7b8833d80d, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, app=test, io.containers.autoupdate=registry) Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.263133474 -0500 EST m=+10.209095217 container restart ea65b4c142a1ecc4c5acde13aead0deeca5cc4c6b5b06f3925f8cc41f46c26ab (image=localhost/podman-pause:5.3.1-1733097600, name=8c5a350fe43d-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jan 04 11:32:56 managed-node2 systemd[1]: Started libpod-ea65b4c142a1ecc4c5acde13aead0deeca5cc4c6b5b06f3925f8cc41f46c26ab.scope - libcrun container. ░░ Subject: A start job for unit libpod-ea65b4c142a1ecc4c5acde13aead0deeca5cc4c6b5b06f3925f8cc41f46c26ab.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-ea65b4c142a1ecc4c5acde13aead0deeca5cc4c6b5b06f3925f8cc41f46c26ab.scope has finished successfully. ░░ ░░ The job identifier is 2504. 
Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.306577989 -0500 EST m=+10.252539792 container init ea65b4c142a1ecc4c5acde13aead0deeca5cc4c6b5b06f3925f8cc41f46c26ab (image=localhost/podman-pause:5.3.1-1733097600, name=8c5a350fe43d-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.308869348 -0500 EST m=+10.254831125 container start ea65b4c142a1ecc4c5acde13aead0deeca5cc4c6b5b06f3925f8cc41f46c26ab (image=localhost/podman-pause:5.3.1-1733097600, name=8c5a350fe43d-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.243893918 -0500 EST m=+10.189855760 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 04 11:32:56 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Jan 04 11:32:56 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Jan 04 11:32:56 managed-node2 kernel: veth1: entered allmulticast mode Jan 04 11:32:56 managed-node2 kernel: veth1: entered promiscuous mode Jan 04 11:32:56 managed-node2 NetworkManager[784]: [1736008376.3388] manager: (veth1): new Veth device (/org/freedesktop/NetworkManager/Devices/8) Jan 04 11:32:56 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Jan 04 11:32:56 managed-node2 kernel: podman1: port 2(veth1) entered forwarding state Jan 04 11:32:56 managed-node2 NetworkManager[784]: [1736008376.3445] device (veth1): carrier: link connected Jan 04 11:32:56 managed-node2 (udev-worker)[29263]: Network interface NamePolicy= disabled on kernel command line. Jan 04 11:32:56 managed-node2 systemd[1]: Started libpod-196161f20f831fa75ecc72ab533fa66e7dcf1074574fe2c3143d784148c1543a.scope - libcrun container. ░░ Subject: A start job for unit libpod-196161f20f831fa75ecc72ab533fa66e7dcf1074574fe2c3143d784148c1543a.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-196161f20f831fa75ecc72ab533fa66e7dcf1074574fe2c3143d784148c1543a.scope has finished successfully. ░░ ░░ The job identifier is 2511. Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.43783887 -0500 EST m=+10.383800650 container init 196161f20f831fa75ecc72ab533fa66e7dcf1074574fe2c3143d784148c1543a (image=localhost/podman-pause:5.3.1-1733097600, name=a93b7ede24ca-infra, pod_id=a93b7ede24ca41ba847ce3cf58cc729dffc746701987f6d92abdaf7b8833d80d, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, io.buildah.version=1.38.0) Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.440103958 -0500 EST m=+10.386065808 container start 196161f20f831fa75ecc72ab533fa66e7dcf1074574fe2c3143d784148c1543a (image=localhost/podman-pause:5.3.1-1733097600, name=a93b7ede24ca-infra, pod_id=a93b7ede24ca41ba847ce3cf58cc729dffc746701987f6d92abdaf7b8833d80d, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, io.buildah.version=1.38.0) Jan 04 11:32:56 managed-node2 systemd[1]: Started libpod-cba46159e385d5b6de190f637934a5720bac746a6ef2ab1573d80109f1bbe7f1.scope - libcrun container. 
░░ Subject: A start job for unit libpod-cba46159e385d5b6de190f637934a5720bac746a6ef2ab1573d80109f1bbe7f1.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-cba46159e385d5b6de190f637934a5720bac746a6ef2ab1573d80109f1bbe7f1.scope has finished successfully. ░░ ░░ The job identifier is 2518. Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.472293597 -0500 EST m=+10.418255355 container init cba46159e385d5b6de190f637934a5720bac746a6ef2ab1573d80109f1bbe7f1 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=a93b7ede24ca41ba847ce3cf58cc729dffc746701987f6d92abdaf7b8833d80d, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.474487097 -0500 EST m=+10.420449136 container start cba46159e385d5b6de190f637934a5720bac746a6ef2ab1573d80109f1bbe7f1 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=a93b7ede24ca41ba847ce3cf58cc729dffc746701987f6d92abdaf7b8833d80d, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jan 04 11:32:56 managed-node2 podman[29219]: 2025-01-04 11:32:56.477489212 -0500 EST m=+10.423450951 pod start a93b7ede24ca41ba847ce3cf58cc729dffc746701987f6d92abdaf7b8833d80d (image=, name=httpd3) Jan 04 11:32:56 managed-node2 podman[29219]: Pod: Jan 04 11:32:56 managed-node2 podman[29219]: a93b7ede24ca41ba847ce3cf58cc729dffc746701987f6d92abdaf7b8833d80d Jan 04 11:32:56 managed-node2 podman[29219]: Container: Jan 04 11:32:56 managed-node2 podman[29219]: cba46159e385d5b6de190f637934a5720bac746a6ef2ab1573d80109f1bbe7f1 Jan 04 11:32:56 managed-node2 systemd[1]: Started podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished successfully. ░░ ░░ The job identifier is 2411. Jan 04 11:32:57 managed-node2 sudo[29469]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zxufrowjytjuwrcddqoyejjgfpptwedl ; /usr/bin/python3.12 /var/tmp/ansible-tmp-1736008377.0993202-16884-78670723418786/AnsiballZ_command.py' Jan 04 11:32:57 managed-node2 sudo[29469]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-29469) opened. Jan 04 11:32:57 managed-node2 sudo[29469]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jan 04 11:32:57 managed-node2 python3.12[29472]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:32:57 managed-node2 systemd[23226]: Started podman-29480.scope. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 112. Jan 04 11:32:57 managed-node2 sudo[29469]: pam_unix(sudo:session): session closed for user podman_basic_user Jan 04 11:32:57 managed-node2 python3.12[29620]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:32:58 managed-node2 python3.12[29759]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:32:58 managed-node2 sudo[29940]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ktbgkgbcavztiutgtmblxyqskehvuryj ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736008378.5466173-16967-12676392522251/AnsiballZ_command.py' Jan 04 11:32:58 managed-node2 sudo[29940]: pam_systemd(sudo:session): New sd-bus connection (system-bus-pam-systemd-29940) opened. Jan 04 11:32:58 managed-node2 sudo[29940]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jan 04 11:32:58 managed-node2 python3.12[29943]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --user list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd1[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:32:58 managed-node2 sudo[29940]: pam_unix(sudo:session): session closed for user podman_basic_user Jan 04 11:32:59 managed-node2 python3.12[30077]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd2[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:32:59 managed-node2 python3.12[30211]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd3[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:33:00 managed-node2 python3.12[30345]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15001/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None 
setype=None attributes=None Jan 04 11:33:00 managed-node2 python3.12[30478]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15002/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:33:01 managed-node2 python3.12[30609]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:33:01 managed-node2 python3.12[30741]: ansible-file Invoked with path=/etc/containers/storage.conf state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:33:02 managed-node2 python3.12[30872]: ansible-file Invoked with path=/tmp/lsr_83t3r8ir_podman state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:33:04 managed-node2 python3.12[31046]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jan 04 11:33:05 managed-node2 python3.12[31219]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:33:06 managed-node2 python3.12[31350]: ansible-ansible.legacy.dnf Invoked with name=['python3-pyasn1', 'python3-cryptography', 'python3-dbus'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 04 11:33:08 managed-node2 python3.12[31486]: ansible-ansible.legacy.dnf Invoked with name=['certmonger', 'python3-packaging'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None 
list=None nobest=None releasever=None Jan 04 11:33:09 managed-node2 dbus-broker-launch[633]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 04 11:33:09 managed-node2 dbus-broker-launch[23700]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 04 11:33:09 managed-node2 dbus-broker-launch[633]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 04 11:33:09 managed-node2 dbus-broker-launch[23700]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Jan 04 11:33:09 managed-node2 dbus-broker-launch[23700]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Jan 04 11:33:09 managed-node2 dbus-broker-launch[633]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. 
░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 04 11:33:09 managed-node2 dbus-broker-launch[23700]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 04 11:33:09 managed-node2 dbus-broker-launch[23700]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Jan 04 11:33:09 managed-node2 dbus-broker-launch[23700]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Jan 04 11:33:09 managed-node2 systemd[1]: Reload requested from client PID 31494 ('systemctl') (unit session-5.scope)... Jan 04 11:33:09 managed-node2 systemd[1]: Reloading... Jan 04 11:33:09 managed-node2 systemd[1]: Reloading finished in 213 ms. Jan 04 11:33:10 managed-node2 systemd[1]: Started run-rea937341daa74dcebd9e99f7e419d30e.service - /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-rea937341daa74dcebd9e99f7e419d30e.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-rea937341daa74dcebd9e99f7e419d30e.service has finished successfully. ░░ ░░ The job identifier is 2529. Jan 04 11:33:10 managed-node2 systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 2607. Jan 04 11:33:10 managed-node2 systemd[1]: Reload requested from client PID 31557 ('systemctl') (unit session-5.scope)... Jan 04 11:33:10 managed-node2 systemd[1]: Reloading... Jan 04 11:33:10 managed-node2 systemd[1]: Reloading finished in 345 ms. Jan 04 11:33:10 managed-node2 systemd[1]: Queuing reload/restart jobs for marked units… Jan 04 11:33:11 managed-node2 systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Jan 04 11:33:11 managed-node2 systemd[1]: Finished man-db-cache-update.service. 
░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 2607. Jan 04 11:33:11 managed-node2 systemd[1]: run-rea937341daa74dcebd9e99f7e419d30e.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-rea937341daa74dcebd9e99f7e419d30e.service has successfully entered the 'dead' state. Jan 04 11:33:11 managed-node2 python3.12[31750]: ansible-file Invoked with name=/etc/certmonger//pre-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//pre-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:33:12 managed-node2 python3.12[31881]: ansible-file Invoked with name=/etc/certmonger//post-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//post-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:33:13 managed-node2 python3.12[32012]: ansible-ansible.legacy.systemd Invoked with name=certmonger state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 04 11:33:13 managed-node2 systemd[1]: Reload requested from client PID 32015 ('systemctl') (unit session-5.scope)... Jan 04 11:33:13 managed-node2 systemd[1]: Reloading... Jan 04 11:33:13 managed-node2 systemd[1]: Reloading finished in 210 ms. Jan 04 11:33:13 managed-node2 systemd[1]: Starting certmonger.service - Certificate monitoring and PKI enrollment... ░░ Subject: A start job for unit certmonger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit certmonger.service has begun execution. ░░ ░░ The job identifier is 2685. Jan 04 11:33:13 managed-node2 (rtmonger)[32072]: certmonger.service: Referenced but unset environment variable evaluates to an empty string: OPTS Jan 04 11:33:13 managed-node2 systemd[1]: Started certmonger.service - Certificate monitoring and PKI enrollment. ░░ Subject: A start job for unit certmonger.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit certmonger.service has finished successfully. ░░ ░░ The job identifier is 2685. 
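With certmonger now running, the role submits the request that appears next in the log (name=quadlet_demo, dns=['localhost'], ca=self-sign, wait=True). A minimal reconstruction of how a playbook asks the certificate role for that certificate; the play wrapper is assumed, and the request fields come from the logged certificate_request invocation:

- hosts: managed-node2
  vars:
    certificate_requests:
      - name: quadlet_demo
        dns: ['localhost']
        ca: self-sign
  roles:
    - fedora.linux_system_roles.certificate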
Jan 04 11:33:14 managed-node2 python3.12[32230]: ansible-fedora.linux_system_roles.certificate_request Invoked with name=quadlet_demo dns=['localhost'] directory=/etc/pki/tls wait=True ca=self-sign __header=# # Ansible managed # # system_role:certificate provider_config_directory=/etc/certmonger provider=certmonger key_usage=['digitalSignature', 'keyEncipherment'] extended_key_usage=['id-kp-serverAuth', 'id-kp-clientAuth'] auto_renew=True ip=None email=None common_name=None country=None state=None locality=None organization=None organizational_unit=None contact_email=None key_size=None owner=None group=None mode=None principal=None run_before=None run_after=None Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 rsyslogd[662]: imjournal: journal files changed, reloading... [v8.2408.0-2.el10 try https://www.rsyslog.com/e/0 ] Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 
11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 certmonger[32246]: Certificate in file "/etc/pki/tls/certs/quadlet_demo.crt" issued by CA and saved. Jan 04 11:33:14 managed-node2 certmonger[32072]: 2025-01-04 11:33:14 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:14 managed-node2 python3.12[32377]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt Jan 04 11:33:15 managed-node2 python3.12[32508]: ansible-slurp Invoked with path=/etc/pki/tls/private/quadlet_demo.key src=/etc/pki/tls/private/quadlet_demo.key Jan 04 11:33:15 managed-node2 python3.12[32639]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt Jan 04 11:33:16 managed-node2 python3.12[32770]: ansible-ansible.legacy.command Invoked with _raw_params=getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:33:16 managed-node2 certmonger[32072]: 2025-01-04 11:33:16 [32072] Wrote to /var/lib/certmonger/requests/20250104163314 Jan 04 11:33:16 managed-node2 python3.12[32902]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:33:17 managed-node2 python3.12[33033]: ansible-file Invoked with path=/etc/pki/tls/private/quadlet_demo.key state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:33:17 managed-node2 python3.12[33164]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None 
attributes=None Jan 04 11:33:18 managed-node2 python3.12[33295]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:33:18 managed-node2 python3.12[33426]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:33:20 managed-node2 python3.12[33688]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:33:21 managed-node2 python3.12[33825]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jan 04 11:33:22 managed-node2 python3.12[33957]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:33:24 managed-node2 python3.12[34090]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:33:24 managed-node2 python3.12[34221]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:33:25 managed-node2 python3.12[34352]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 04 11:33:26 managed-node2 python3.12[34484]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 04 11:33:26 managed-node2 python3.12[34617]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 04 11:33:27 managed-node2 python3.12[34750]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 04 11:33:28 managed-node2 python3.12[34881]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 04 11:33:33 
managed-node2 python3.12[35488]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:33:34 managed-node2 python3.12[35621]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:33:34 managed-node2 python3.12[35752]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.network follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 04 11:33:35 managed-node2 python3.12[35857]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736008414.7326126-18841-108046773840670/.source.network dest=/etc/containers/systemd/quadlet-demo.network owner=root group=0 mode=0644 _original_basename=quadlet-demo.network follow=False checksum=e57c08d49aff4bae8daab138d913aeddaa8682a0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:33:36 managed-node2 python3.12[35988]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 04 11:33:36 managed-node2 systemd[1]: Reload requested from client PID 35989 ('systemctl') (unit session-5.scope)... Jan 04 11:33:36 managed-node2 systemd[1]: Reloading... Jan 04 11:33:36 managed-node2 systemd[1]: Reloading finished in 213 ms. Jan 04 11:33:36 managed-node2 python3.12[36175]: ansible-systemd Invoked with name=quadlet-demo-network.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 04 11:33:36 managed-node2 systemd[1]: Starting quadlet-demo-network.service... ░░ Subject: A start job for unit quadlet-demo-network.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-network.service has begun execution. ░░ ░░ The job identifier is 2764. Jan 04 11:33:36 managed-node2 quadlet-demo-network[36179]: systemd-quadlet-demo Jan 04 11:33:36 managed-node2 systemd[1]: Finished quadlet-demo-network.service. ░░ Subject: A start job for unit quadlet-demo-network.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-network.service has finished successfully. ░░ ░░ The job identifier is 2764. 
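The three entries above (copy of quadlet-demo.network into /etc/containers/systemd, a systemd daemon-reload, then start of quadlet-demo-network.service) are the standard Quadlet deployment pattern: drop a unit file where the Quadlet generator can see it, reload systemd so the generator emits the corresponding service, then start that service. A minimal sketch of the same sequence in Ansible is shown below; the [Network] body is an assumption for illustration (the test's real file content is not echoed in this log), and with no NetworkName= set Quadlet uses the default name systemd-quadlet-demo, which matches the name the service prints above.

---
# Sketch only: mirrors the copy -> daemon-reload -> start sequence seen in the journal.
# The unit body is assumed; the test's actual quadlet-demo.network may differ.
- hosts: managed-node2
  become: true
  tasks:
    - name: Install the Quadlet network unit
      ansible.builtin.copy:
        dest: /etc/containers/systemd/quadlet-demo.network
        owner: root
        group: root
        mode: "0644"
        content: |
          [Network]
          # no NetworkName= given, so the network is created as systemd-quadlet-demo

    - name: Let the Quadlet generator produce quadlet-demo-network.service
      ansible.builtin.systemd_service:
        daemon_reload: true

    - name: Start the generated service, which creates the podman network on first start
      ansible.builtin.systemd_service:
        name: quadlet-demo-network.service
        state: started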
Jan 04 11:33:37 managed-node2 python3.12[36317]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:33:39 managed-node2 python3.12[36450]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:33:40 managed-node2 python3.12[36581]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.volume follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 04 11:33:40 managed-node2 python3.12[36686]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736008419.8707628-19085-91622550066190/.source.volume dest=/etc/containers/systemd/quadlet-demo-mysql.volume owner=root group=0 mode=0644 _original_basename=quadlet-demo-mysql.volume follow=False checksum=585f8cbdf0ec73000f9227dcffbef71e9552ea4a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:33:41 managed-node2 python3.12[36817]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 04 11:33:41 managed-node2 systemd[1]: Reload requested from client PID 36818 ('systemctl') (unit session-5.scope)... Jan 04 11:33:41 managed-node2 systemd[1]: Reloading... Jan 04 11:33:41 managed-node2 systemd[1]: Reloading finished in 220 ms. Jan 04 11:33:41 managed-node2 python3.12[37004]: ansible-systemd Invoked with name=quadlet-demo-mysql-volume.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 04 11:33:41 managed-node2 systemd[1]: Starting quadlet-demo-mysql-volume.service... ░░ Subject: A start job for unit quadlet-demo-mysql-volume.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql-volume.service has begun execution. ░░ ░░ The job identifier is 2848. Jan 04 11:33:42 managed-node2 podman[37008]: 2025-01-04 11:33:42.030547669 -0500 EST m=+0.026014171 volume create systemd-quadlet-demo-mysql Jan 04 11:33:42 managed-node2 quadlet-demo-mysql-volume[37008]: systemd-quadlet-demo-mysql Jan 04 11:33:42 managed-node2 systemd[1]: Finished quadlet-demo-mysql-volume.service. ░░ Subject: A start job for unit quadlet-demo-mysql-volume.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql-volume.service has finished successfully. ░░ ░░ The job identifier is 2848. 
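The quadlet-demo-mysql.volume step just above follows the same copy, daemon-reload, start pattern; only the unit body differs. A sketch of the copy task, with assumed content: without a VolumeName= key the generated quadlet-demo-mysql-volume.service creates the volume systemd-quadlet-demo-mysql, exactly the name logged by the "podman volume create" event above.

# Sketch of a tasks-file fragment; the unit body is assumed for illustration.
- name: Install the Quadlet volume unit
  ansible.builtin.copy:
    dest: /etc/containers/systemd/quadlet-demo-mysql.volume
    owner: root
    group: root
    mode: "0644"
    content: |
      [Volume]
      # default generated name: systemd-quadlet-demo-mysql
# Followed, as before, by daemon_reload and starting quadlet-demo-mysql-volume.service.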
Jan 04 11:33:43 managed-node2 python3.12[37146]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:33:44 managed-node2 python3.12[37279]: ansible-file Invoked with path=/tmp/quadlet_demo state=directory owner=root group=root mode=0777 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:33:51 managed-node2 podman[37418]: 2025-01-04 11:33:51.494779884 -0500 EST m=+5.942509700 image pull dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6 Jan 04 11:33:51 managed-node2 python3.12[37729]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:33:52 managed-node2 python3.12[37860]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 04 11:33:52 managed-node2 python3.12[37965]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/systemd/quadlet-demo-mysql.container owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736008432.0441234-19575-189994622198139/.source.container _original_basename=.c2vzggou follow=False checksum=ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:33:53 managed-node2 python3.12[38096]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 04 11:33:53 managed-node2 systemd[1]: Reload requested from client PID 38097 ('systemctl') (unit session-5.scope)... Jan 04 11:33:53 managed-node2 systemd[1]: Reloading... Jan 04 11:33:53 managed-node2 systemd[1]: Reloading finished in 217 ms. Jan 04 11:33:53 managed-node2 python3.12[38283]: ansible-systemd Invoked with name=quadlet-demo-mysql.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 04 11:33:53 managed-node2 systemd[1]: Starting quadlet-demo-mysql.service... ░░ Subject: A start job for unit quadlet-demo-mysql.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql.service has begun execution. ░░ ░░ The job identifier is 2932. 
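quadlet-demo-mysql.container ties the previous two units together: it runs quay.io/linux-system-roles/mysql:5.6 as container quadlet-demo-mysql, joins the quadlet-demo network (the podman2 bridge that NetworkManager reports in the entries that follow) and mounts the MySQL volume; the healthcheck timer systemd starts a little further down is what Quadlet/podman generate when the container defines a health check. A hedged sketch of such a unit, with keys and values assumed for illustration (environment and secret handling for the MySQL root password is deliberately omitted):

# Sketch only; values are assumptions consistent with the journal, not the test's real file.
- name: Install the Quadlet container unit for MySQL
  ansible.builtin.copy:
    dest: /etc/containers/systemd/quadlet-demo-mysql.container
    owner: root
    group: root
    mode: "0644"
    content: |
      [Container]
      Image=quay.io/linux-system-roles/mysql:5.6
      ContainerName=quadlet-demo-mysql
      # referencing the .network unit attaches the container to the quadlet-demo network
      Network=quadlet-demo.network
      # referencing the .volume unit resolves to the systemd-quadlet-demo-mysql volume
      Volume=quadlet-demo-mysql.volume:/var/lib/mysql
      # a HealthCmd is what produces the healthcheck timer seen in the log (command assumed)
      HealthCmd=/bin/true
# After daemon_reload, starting quadlet-demo-mysql.service creates and starts the container.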
Jan 04 11:33:54 managed-node2 podman[38287]: 2025-01-04 11:33:54.050588933 -0500 EST m=+0.038398037 container create 1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 04 11:33:54 managed-node2 NetworkManager[784]: [1736008434.0751] manager: (podman2): new Bridge device (/org/freedesktop/NetworkManager/Devices/9) Jan 04 11:33:54 managed-node2 kernel: podman2: port 1(veth2) entered blocking state Jan 04 11:33:54 managed-node2 kernel: podman2: port 1(veth2) entered disabled state Jan 04 11:33:54 managed-node2 kernel: veth2: entered allmulticast mode Jan 04 11:33:54 managed-node2 kernel: veth2: entered promiscuous mode Jan 04 11:33:54 managed-node2 kernel: podman2: port 1(veth2) entered blocking state Jan 04 11:33:54 managed-node2 kernel: podman2: port 1(veth2) entered forwarding state Jan 04 11:33:54 managed-node2 NetworkManager[784]: [1736008434.0837] manager: (veth2): new Veth device (/org/freedesktop/NetworkManager/Devices/10) Jan 04 11:33:54 managed-node2 NetworkManager[784]: [1736008434.0869] device (veth2): carrier: link connected Jan 04 11:33:54 managed-node2 NetworkManager[784]: [1736008434.0871] device (podman2): carrier: link connected Jan 04 11:33:54 managed-node2 (udev-worker)[38303]: Network interface NamePolicy= disabled on kernel command line. Jan 04 11:33:54 managed-node2 (udev-worker)[38302]: Network interface NamePolicy= disabled on kernel command line. Jan 04 11:33:54 managed-node2 NetworkManager[784]: [1736008434.1252] device (podman2): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jan 04 11:33:54 managed-node2 NetworkManager[784]: [1736008434.1263] device (podman2): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jan 04 11:33:54 managed-node2 NetworkManager[784]: [1736008434.1271] device (podman2): Activation: starting connection 'podman2' (7cd99f47-f94f-4713-adfd-4d35815d93d3) Jan 04 11:33:54 managed-node2 NetworkManager[784]: [1736008434.1272] device (podman2): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jan 04 11:33:54 managed-node2 NetworkManager[784]: [1736008434.1286] device (podman2): state change: prepare -> config (reason 'none', managed-type: 'external') Jan 04 11:33:54 managed-node2 NetworkManager[784]: [1736008434.1289] device (podman2): state change: config -> ip-config (reason 'none', managed-type: 'external') Jan 04 11:33:54 managed-node2 NetworkManager[784]: [1736008434.1293] device (podman2): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jan 04 11:33:54 managed-node2 podman[38287]: 2025-01-04 11:33:54.036000448 -0500 EST m=+0.023809823 image pull dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6 Jan 04 11:33:54 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 3019. Jan 04 11:33:54 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. 
░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 3019. Jan 04 11:33:54 managed-node2 NetworkManager[784]: [1736008434.1741] device (podman2): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jan 04 11:33:54 managed-node2 NetworkManager[784]: [1736008434.1744] device (podman2): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jan 04 11:33:54 managed-node2 NetworkManager[784]: [1736008434.1752] device (podman2): Activation: successful, device activated. Jan 04 11:33:54 managed-node2 systemd[1]: Started 1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e-1404942fa2db9863.timer - /usr/bin/podman healthcheck run 1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e. ░░ Subject: A start job for unit 1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e-1404942fa2db9863.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit 1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e-1404942fa2db9863.timer has finished successfully. ░░ ░░ The job identifier is 3098. Jan 04 11:33:54 managed-node2 podman[38287]: 2025-01-04 11:33:54.224823382 -0500 EST m=+0.212632600 container init 1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 04 11:33:54 managed-node2 systemd[1]: Started quadlet-demo-mysql.service. ░░ Subject: A start job for unit quadlet-demo-mysql.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql.service has finished successfully. ░░ ░░ The job identifier is 2932. 
Jan 04 11:33:54 managed-node2 podman[38287]: 2025-01-04 11:33:54.279365523 -0500 EST m=+0.267174828 container start 1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 04 11:33:54 managed-node2 quadlet-demo-mysql[38287]: 1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e Jan 04 11:33:54 managed-node2 podman[38351]: 2025-01-04 11:33:54.409100651 -0500 EST m=+0.124781707 container health_status 1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=0, health_log=, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 04 11:33:55 managed-node2 python3.12[38541]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:33:56 managed-node2 python3.12[38685]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:33:56 managed-node2 python3.12[38816]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/envoy-proxy-configmap.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 04 11:33:57 managed-node2 python3.12[38921]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736008436.5920036-19781-220862317646466/.source.yml dest=/etc/containers/systemd/envoy-proxy-configmap.yml owner=root group=0 mode=0644 _original_basename=envoy-proxy-configmap.yml follow=False checksum=d681c7d56f912150d041873e880818b22a90c188 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:33:57 managed-node2 python3.12[39076]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 04 11:33:57 managed-node2 systemd[1]: Reload requested from client PID 39077 ('systemctl') (unit session-5.scope)... Jan 04 11:33:57 managed-node2 systemd[1]: Reloading... Jan 04 11:33:58 managed-node2 systemd[1]: Reloading finished in 229 ms. 
Jan 04 11:33:58 managed-node2 python3.12[39264]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:34:00 managed-node2 python3.12[39425]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:34:00 managed-node2 python3.12[39565]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 04 11:34:01 managed-node2 python3.12[39670]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/systemd/quadlet-demo.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736008440.4832172-19964-76184447797368/.source.yml _original_basename=.izqh1yla follow=False checksum=998dccde0483b1654327a46ddd89cbaa47650370 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:34:02 managed-node2 python3.12[39801]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 04 11:34:02 managed-node2 systemd[1]: Reload requested from client PID 39809 ('systemctl') (unit session-5.scope)... Jan 04 11:34:02 managed-node2 systemd[1]: Reloading... Jan 04 11:34:02 managed-node2 systemd[1]: Reloading finished in 231 ms. Jan 04 11:34:03 managed-node2 python3.12[39997]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:34:04 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. 
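quadlet-demo.yml, copied above, is the Kubernetes-style manifest that the .kube unit installed in the next step hands to podman kube play. Its exact content is not echoed in this log, but the names it must define can be read off the pod-start events further down: a wp-pv-claim PersistentVolumeClaim, a pod named quadlet-demo with wordpress and envoy containers, and envoy-proxy-config / envoy-certificates volumes. The following is a rough reconstruction for orientation only; everything beyond the names and images visible in the journal (mount paths, sizes, ConfigMap/Secret names) is assumed.

# Assumed reconstruction; only names and images visible in the journal are taken as given.
---
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
  name: wp-pv-claim
spec:
  accessModes: [ReadWriteOnce]
  resources:
    requests:
      storage: 1Gi            # assumed size
---
apiVersion: v1
kind: Pod
metadata:
  name: quadlet-demo
spec:
  containers:
    - name: wordpress                     # becomes container quadlet-demo-wordpress
      image: quay.io/linux-system-roles/wordpress:4.8-apache
      volumeMounts:
        - name: wp-pv-claim
          mountPath: /var/www/html        # assumed mount path
    - name: envoy                         # becomes container quadlet-demo-envoy
      image: quay.io/linux-system-roles/envoyproxy:v1.25.0
      volumeMounts:
        - name: envoy-proxy-config        # assumed: filled from envoy-proxy-configmap.yml
          mountPath: /etc/envoy
        - name: envoy-certificates        # assumed: the quadlet_demo TLS material
          mountPath: /etc/envoy-certificates
  volumes:
    - name: wp-pv-claim
      persistentVolumeClaim:
        claimName: wp-pv-claim
    - name: envoy-proxy-config
      configMap:
        name: envoy-proxy-config          # assumed ConfigMap name
    - name: envoy-certificates
      secret:
        secretName: envoy-certificates    # assumed Secret name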
Jan 04 11:34:04 managed-node2 python3.12[40142]: ansible-slurp Invoked with path=/etc/containers/systemd/quadlet-demo.yml src=/etc/containers/systemd/quadlet-demo.yml Jan 04 11:34:05 managed-node2 python3.12[40285]: ansible-file Invoked with path=/tmp/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:34:05 managed-node2 python3.12[40416]: ansible-file Invoked with path=/tmp/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:34:18 managed-node2 podman[40555]: 2025-01-04 11:34:18.155808556 -0500 EST m=+12.224013735 image pull fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache Jan 04 11:34:22 managed-node2 podman[40973]: 2025-01-04 11:34:22.482748384 -0500 EST m=+3.852421401 image pull 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d quay.io/linux-system-roles/envoyproxy:v1.25.0 Jan 04 11:34:22 managed-node2 python3.12[41236]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:34:23 managed-node2 python3.12[41367]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.kube follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 04 11:34:23 managed-node2 python3.12[41472]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736008463.0030222-20502-2952385679144/.source.kube dest=/etc/containers/systemd/quadlet-demo.kube owner=root group=0 mode=0644 _original_basename=quadlet-demo.kube follow=False checksum=7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:34:24 managed-node2 python3.12[41603]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 04 11:34:24 managed-node2 systemd[1]: Reload requested from client PID 41604 ('systemctl') (unit session-5.scope)... Jan 04 11:34:24 managed-node2 systemd[1]: Reloading... Jan 04 11:34:24 managed-node2 systemd[1]: Reloading finished in 221 ms. 
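quadlet-demo.kube, installed above, is the unit that turns that manifest into quadlet-demo.service. A sketch with assumed values: the host ports 8000 and 9000 match the firewall ports opened earlier, while the container-side ports and the ConfigMap reference are guesses, not the test's real file.

# Sketch only; PublishPort container-side ports and the ConfigMap line are assumptions.
- name: Install the Quadlet kube unit
  ansible.builtin.copy:
    dest: /etc/containers/systemd/quadlet-demo.kube
    owner: root
    group: root
    mode: "0644"
    content: |
      [Kube]
      Yaml=quadlet-demo.yml
      ConfigMap=envoy-proxy-configmap.yml
      Network=quadlet-demo.network
      PublishPort=8000:8080
      PublishPort=9000:9901
# daemon_reload plus starting quadlet-demo.service then runs podman kube play, which is
# what produces the volume/pod/container create events recorded in the entries that follow.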
Jan 04 11:34:24 managed-node2 podman[41768]: 2025-01-04 11:34:24.757102792 -0500 EST m=+0.122146579 container health_status 1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=0, health_log=, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 04 11:34:24 managed-node2 python3.12[41797]: ansible-systemd Invoked with name=quadlet-demo.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 04 11:34:24 managed-node2 systemd[1]: Starting quadlet-demo.service... ░░ Subject: A start job for unit quadlet-demo.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo.service has begun execution. ░░ ░░ The job identifier is 3332. Jan 04 11:34:25 managed-node2 quadlet-demo[41809]: Pods stopped: Jan 04 11:34:25 managed-node2 quadlet-demo[41809]: Pods removed: Jan 04 11:34:25 managed-node2 quadlet-demo[41809]: Secrets removed: Jan 04 11:34:25 managed-node2 quadlet-demo[41809]: Volumes removed: Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.013772373 -0500 EST m=+0.028358319 volume create wp-pv-claim Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.039427953 -0500 EST m=+0.054013906 container create f07164ee3e905df54a70f59e80d3960832e74eb9830cb83662de5ad55e2abf12 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.048209442 -0500 EST m=+0.062795385 volume create envoy-proxy-config Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.054725023 -0500 EST m=+0.069310938 volume create envoy-certificates Jan 04 11:34:25 managed-node2 systemd[1]: Created slice machine-libpod_pod_56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3.slice - cgroup machine-libpod_pod_56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3.slice. ░░ Subject: A start job for unit machine-libpod_pod_56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3.slice has finished successfully. ░░ ░░ The job identifier is 3419. 
Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.099796146 -0500 EST m=+0.114382082 container create 65b9ce253bc156b6f3d547f7ab92379b31509f467860bb1e1e8fb7619c76b9b3 (image=localhost/podman-pause:5.3.1-1733097600, name=56bbf7fc2aa4-infra, pod_id=56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.106000364 -0500 EST m=+0.120586274 pod create 56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3 (image=, name=quadlet-demo) Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.143568363 -0500 EST m=+0.158154412 container create 99436d65d89af03f14af27dcb71779c19cd57b99f6b1b50d15c2382093ca4c08 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.174829744 -0500 EST m=+0.189415811 container create 23c3f79e2bc8df0b3cda71af6c29855118c1459ae9e282ca023ba08f99e48001 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.175162259 -0500 EST m=+0.189748206 container restart f07164ee3e905df54a70f59e80d3960832e74eb9830cb83662de5ad55e2abf12 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 04 11:34:25 managed-node2 systemd[23226]: Starting grub-boot-success.service - Mark boot as successful... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 118. Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.11220576 -0500 EST m=+0.126791876 image pull fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.147419493 -0500 EST m=+0.162005572 image pull 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d quay.io/linux-system-roles/envoyproxy:v1.25.0 Jan 04 11:34:25 managed-node2 systemd[1]: Started libpod-f07164ee3e905df54a70f59e80d3960832e74eb9830cb83662de5ad55e2abf12.scope - libcrun container. ░░ Subject: A start job for unit libpod-f07164ee3e905df54a70f59e80d3960832e74eb9830cb83662de5ad55e2abf12.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-f07164ee3e905df54a70f59e80d3960832e74eb9830cb83662de5ad55e2abf12.scope has finished successfully. ░░ ░░ The job identifier is 3425. Jan 04 11:34:25 managed-node2 systemd[23226]: Finished grub-boot-success.service - Mark boot as successful. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 118. 
Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.218300069 -0500 EST m=+0.232886160 container init f07164ee3e905df54a70f59e80d3960832e74eb9830cb83662de5ad55e2abf12 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.220459012 -0500 EST m=+0.235045097 container start f07164ee3e905df54a70f59e80d3960832e74eb9830cb83662de5ad55e2abf12 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 04 11:34:25 managed-node2 kernel: podman2: port 2(veth3) entered blocking state Jan 04 11:34:25 managed-node2 kernel: podman2: port 2(veth3) entered disabled state Jan 04 11:34:25 managed-node2 kernel: veth3: entered allmulticast mode Jan 04 11:34:25 managed-node2 kernel: veth3: entered promiscuous mode Jan 04 11:34:25 managed-node2 kernel: podman2: port 2(veth3) entered blocking state Jan 04 11:34:25 managed-node2 kernel: podman2: port 2(veth3) entered forwarding state Jan 04 11:34:25 managed-node2 NetworkManager[784]: [1736008465.2503] manager: (veth3): new Veth device (/org/freedesktop/NetworkManager/Devices/11) Jan 04 11:34:25 managed-node2 NetworkManager[784]: [1736008465.2541] device (veth3): carrier: link connected Jan 04 11:34:25 managed-node2 (udev-worker)[41833]: Network interface NamePolicy= disabled on kernel command line. Jan 04 11:34:25 managed-node2 systemd[1]: Started libpod-65b9ce253bc156b6f3d547f7ab92379b31509f467860bb1e1e8fb7619c76b9b3.scope - libcrun container. ░░ Subject: A start job for unit libpod-65b9ce253bc156b6f3d547f7ab92379b31509f467860bb1e1e8fb7619c76b9b3.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-65b9ce253bc156b6f3d547f7ab92379b31509f467860bb1e1e8fb7619c76b9b3.scope has finished successfully. ░░ ░░ The job identifier is 3432. Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.335532288 -0500 EST m=+0.350118287 container init 65b9ce253bc156b6f3d547f7ab92379b31509f467860bb1e1e8fb7619c76b9b3 (image=localhost/podman-pause:5.3.1-1733097600, name=56bbf7fc2aa4-infra, pod_id=56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.337740363 -0500 EST m=+0.352326472 container start 65b9ce253bc156b6f3d547f7ab92379b31509f467860bb1e1e8fb7619c76b9b3 (image=localhost/podman-pause:5.3.1-1733097600, name=56bbf7fc2aa4-infra, pod_id=56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Jan 04 11:34:25 managed-node2 systemd[1]: Started libpod-99436d65d89af03f14af27dcb71779c19cd57b99f6b1b50d15c2382093ca4c08.scope - libcrun container. ░░ Subject: A start job for unit libpod-99436d65d89af03f14af27dcb71779c19cd57b99f6b1b50d15c2382093ca4c08.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-99436d65d89af03f14af27dcb71779c19cd57b99f6b1b50d15c2382093ca4c08.scope has finished successfully. ░░ ░░ The job identifier is 3439. 
Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.389982183 -0500 EST m=+0.404568172 container init 99436d65d89af03f14af27dcb71779c19cd57b99f6b1b50d15c2382093ca4c08 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.392369406 -0500 EST m=+0.406955405 container start 99436d65d89af03f14af27dcb71779c19cd57b99f6b1b50d15c2382093ca4c08 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 04 11:34:25 managed-node2 systemd[1]: Started libpod-23c3f79e2bc8df0b3cda71af6c29855118c1459ae9e282ca023ba08f99e48001.scope - libcrun container. ░░ Subject: A start job for unit libpod-23c3f79e2bc8df0b3cda71af6c29855118c1459ae9e282ca023ba08f99e48001.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-23c3f79e2bc8df0b3cda71af6c29855118c1459ae9e282ca023ba08f99e48001.scope has finished successfully. ░░ ░░ The job identifier is 3446. Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.459204733 -0500 EST m=+0.473790707 container init 23c3f79e2bc8df0b3cda71af6c29855118c1459ae9e282ca023ba08f99e48001 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.461460007 -0500 EST m=+0.476046011 container start 23c3f79e2bc8df0b3cda71af6c29855118c1459ae9e282ca023ba08f99e48001 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 04 11:34:25 managed-node2 podman[41809]: 2025-01-04 11:34:25.468364484 -0500 EST m=+0.482950426 pod start 56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3 (image=, name=quadlet-demo) Jan 04 11:34:25 managed-node2 systemd[1]: Started quadlet-demo.service. ░░ Subject: A start job for unit quadlet-demo.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo.service has finished successfully. ░░ ░░ The job identifier is 3332. 
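With quadlet-demo.service up, the test probes the web endpoint through the published port; the repeated ansible-get_url invocations against https://localhost:8000 in the entries below are that probe being retried. Expressed as a single hedged task (the test's actual retry structure may differ; retries and delay here are illustrative), it looks roughly like this, and the podman ps / volume ls / pod ps / systemctl list-units / journalctl -ex commands interleaved with it are the state the test collects for debugging around the check.

# Sketch of the endpoint probe; retry/delay values are illustrative, not the test's.
- name: Check that the WordPress/envoy front end answers on the published port
  ansible.builtin.get_url:
    url: https://localhost:8000
    dest: /run/out
    mode: "0600"
    validate_certs: false   # the quadlet_demo certificate is self-signed
    timeout: 10
  register: web_check
  retries: 6
  delay: 5
  until: web_check is succeeded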
Jan 04 11:34:25 managed-node2 quadlet-demo[41809]: Volumes: Jan 04 11:34:25 managed-node2 quadlet-demo[41809]: wp-pv-claim Jan 04 11:34:25 managed-node2 quadlet-demo[41809]: Pod: Jan 04 11:34:25 managed-node2 quadlet-demo[41809]: 56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3 Jan 04 11:34:25 managed-node2 quadlet-demo[41809]: Containers: Jan 04 11:34:25 managed-node2 quadlet-demo[41809]: 99436d65d89af03f14af27dcb71779c19cd57b99f6b1b50d15c2382093ca4c08 Jan 04 11:34:25 managed-node2 quadlet-demo[41809]: 23c3f79e2bc8df0b3cda71af6c29855118c1459ae9e282ca023ba08f99e48001 Jan 04 11:34:26 managed-node2 python3.12[42060]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /etc/containers/systemd _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:34:26 managed-node2 python3.12[42267]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps -a _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:34:26 managed-node2 python3.12[42466]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:34:27 managed-node2 python3.12[42610]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod ps --ctr-ids --ctr-names --ctr-status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:34:27 managed-node2 python3.12[42750]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail; systemctl list-units | grep quadlet _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:34:28 managed-node2 python3.12[42884]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:34:33 managed-node2 python3.12[43015]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:34:39 managed-node2 python3.12[43146]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True 
use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:34:44 managed-node2 python3.12[43277]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:34:50 managed-node2 python3.12[43408]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:34:55 managed-node2 podman[43517]: 2025-01-04 11:34:55.652880074 -0500 EST m=+0.122337576 container health_status 1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=0, health_log=, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 04 11:34:55 managed-node2 python3.12[43546]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:35:01 managed-node2 python3.12[43687]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:35:01 managed-node2 python3.12[43818]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:35:02 managed-node2 python3.12[43950]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps -a _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:35:02 managed-node2 python3.12[44089]: ansible-ansible.legacy.command Invoked with 
_raw_params=podman pod ps --ctr-ids --ctr-names --ctr-status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:35:03 managed-node2 python3.12[44228]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail; systemctl list-units --all | grep quadlet _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:35:03 managed-node2 python3.12[44362]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /etc/systemd/system _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:35:05 managed-node2 python3.12[44625]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:35:06 managed-node2 python3.12[44762]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:35:07 managed-node2 python3.12[44895]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 04 11:35:08 managed-node2 python3.12[45027]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 04 11:35:09 managed-node2 python3.12[45160]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 04 11:35:09 managed-node2 python3.12[45293]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 04 11:35:10 managed-node2 python3.12[45424]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 04 11:35:14 managed-node2 python3.12[45970]: ansible-stat Invoked with 
path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:35:15 managed-node2 python3.12[46103]: ansible-systemd Invoked with name=quadlet-demo.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jan 04 11:35:15 managed-node2 systemd[1]: Reload requested from client PID 46106 ('systemctl') (unit session-5.scope)... Jan 04 11:35:15 managed-node2 systemd[1]: Reloading... Jan 04 11:35:15 managed-node2 systemd[1]: Reloading finished in 233 ms. Jan 04 11:35:15 managed-node2 systemd[1]: Stopping quadlet-demo.service... ░░ Subject: A stop job for unit quadlet-demo.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit quadlet-demo.service has begun execution. ░░ ░░ The job identifier is 3531. Jan 04 11:35:15 managed-node2 systemd[1]: libpod-f07164ee3e905df54a70f59e80d3960832e74eb9830cb83662de5ad55e2abf12.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-f07164ee3e905df54a70f59e80d3960832e74eb9830cb83662de5ad55e2abf12.scope has successfully entered the 'dead' state. Jan 04 11:35:15 managed-node2 conmon[41819]: conmon f07164ee3e905df54a70 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/libpod-f07164ee3e905df54a70f59e80d3960832e74eb9830cb83662de5ad55e2abf12.scope/container/memory.events Jan 04 11:35:15 managed-node2 podman[46164]: 2025-01-04 11:35:15.792589131 -0500 EST m=+0.021887030 container died f07164ee3e905df54a70f59e80d3960832e74eb9830cb83662de5ad55e2abf12 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 04 11:35:15 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-f07164ee3e905df54a70f59e80d3960832e74eb9830cb83662de5ad55e2abf12-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-f07164ee3e905df54a70f59e80d3960832e74eb9830cb83662de5ad55e2abf12-userdata-shm.mount has successfully entered the 'dead' state. Jan 04 11:35:15 managed-node2 podman[46164]: 2025-01-04 11:35:15.821942938 -0500 EST m=+0.051240760 container cleanup f07164ee3e905df54a70f59e80d3960832e74eb9830cb83662de5ad55e2abf12 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 04 11:35:15 managed-node2 systemd[1]: var-lib-containers-storage-overlay-8a8b301c62009c34a8d870eefc5d14412a992a5c421905e5b3327aae7f50f02a-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-8a8b301c62009c34a8d870eefc5d14412a992a5c421905e5b3327aae7f50f02a-merged.mount has successfully entered the 'dead' state. Jan 04 11:35:15 managed-node2 podman[46173]: 2025-01-04 11:35:15.860109587 -0500 EST m=+0.024422376 pod stop 56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3 (image=, name=quadlet-demo) Jan 04 11:35:15 managed-node2 systemd[1]: libpod-65b9ce253bc156b6f3d547f7ab92379b31509f467860bb1e1e8fb7619c76b9b3.scope: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-65b9ce253bc156b6f3d547f7ab92379b31509f467860bb1e1e8fb7619c76b9b3.scope has successfully entered the 'dead' state. Jan 04 11:35:15 managed-node2 podman[46173]: 2025-01-04 11:35:15.878548552 -0500 EST m=+0.042861380 container died 65b9ce253bc156b6f3d547f7ab92379b31509f467860bb1e1e8fb7619c76b9b3 (image=localhost/podman-pause:5.3.1-1733097600, name=56bbf7fc2aa4-infra, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Jan 04 11:35:15 managed-node2 systemd[1]: libpod-23c3f79e2bc8df0b3cda71af6c29855118c1459ae9e282ca023ba08f99e48001.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-23c3f79e2bc8df0b3cda71af6c29855118c1459ae9e282ca023ba08f99e48001.scope has successfully entered the 'dead' state. Jan 04 11:35:15 managed-node2 podman[46173]: 2025-01-04 11:35:15.896050919 -0500 EST m=+0.060363794 container died 23c3f79e2bc8df0b3cda71af6c29855118c1459ae9e282ca023ba08f99e48001 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 04 11:35:15 managed-node2 kernel: podman2: port 2(veth3) entered disabled state Jan 04 11:35:15 managed-node2 kernel: veth3 (unregistering): left allmulticast mode Jan 04 11:35:15 managed-node2 kernel: veth3 (unregistering): left promiscuous mode Jan 04 11:35:15 managed-node2 kernel: podman2: port 2(veth3) entered disabled state Jan 04 11:35:15 managed-node2 systemd[1]: run-netns-netns\x2d55b57b64\x2d2473\x2d4fdc\x2ddf3d\x2df841e97bba0d.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2d55b57b64\x2d2473\x2d4fdc\x2ddf3d\x2df841e97bba0d.mount has successfully entered the 'dead' state. Jan 04 11:35:15 managed-node2 systemd[1]: var-lib-containers-storage-overlay-91ef28e06d0e8da66bf9cd7d22c2cea6a6040b3d5dc114bf2a4788a96dd8e802-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-91ef28e06d0e8da66bf9cd7d22c2cea6a6040b3d5dc114bf2a4788a96dd8e802-merged.mount has successfully entered the 'dead' state. Jan 04 11:35:15 managed-node2 podman[46173]: 2025-01-04 11:35:15.992316207 -0500 EST m=+0.156628928 container cleanup 23c3f79e2bc8df0b3cda71af6c29855118c1459ae9e282ca023ba08f99e48001 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 04 11:35:16 managed-node2 systemd[1]: var-lib-containers-storage-overlay-051bc763f0435fc1778c64a3200f64e7038b4d69bd80a053d7bc1a552262f731-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-051bc763f0435fc1778c64a3200f64e7038b4d69bd80a053d7bc1a552262f731-merged.mount has successfully entered the 'dead' state. Jan 04 11:35:16 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-65b9ce253bc156b6f3d547f7ab92379b31509f467860bb1e1e8fb7619c76b9b3-userdata-shm.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-65b9ce253bc156b6f3d547f7ab92379b31509f467860bb1e1e8fb7619c76b9b3-userdata-shm.mount has successfully entered the 'dead' state. Jan 04 11:35:16 managed-node2 podman[46173]: 2025-01-04 11:35:16.015565927 -0500 EST m=+0.179878624 container cleanup 65b9ce253bc156b6f3d547f7ab92379b31509f467860bb1e1e8fb7619c76b9b3 (image=localhost/podman-pause:5.3.1-1733097600, name=56bbf7fc2aa4-infra, pod_id=56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Jan 04 11:35:16 managed-node2 systemd[1]: libpod-99436d65d89af03f14af27dcb71779c19cd57b99f6b1b50d15c2382093ca4c08.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-99436d65d89af03f14af27dcb71779c19cd57b99f6b1b50d15c2382093ca4c08.scope has successfully entered the 'dead' state. Jan 04 11:35:16 managed-node2 podman[46173]: 2025-01-04 11:35:16.057981941 -0500 EST m=+0.222294614 container died 99436d65d89af03f14af27dcb71779c19cd57b99f6b1b50d15c2382093ca4c08 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 04 11:35:16 managed-node2 podman[46173]: 2025-01-04 11:35:16.085445397 -0500 EST m=+0.249758155 container cleanup 99436d65d89af03f14af27dcb71779c19cd57b99f6b1b50d15c2382093ca4c08 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 04 11:35:16 managed-node2 systemd[1]: Removed slice machine-libpod_pod_56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3.slice - cgroup machine-libpod_pod_56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3.slice. ░░ Subject: A stop job for unit machine-libpod_pod_56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3.slice has finished. ░░ ░░ The job identifier is 3533 and the job result is done. 
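The stop of quadlet-demo.service above and the entries that follow are the role's cleanup path: podman removes the pod's containers and then the pod itself, the quadlet-demo.kube file is deleted, systemd is reloaded so the generated unit disappears, and unused images are pruned. A compressed sketch of those steps, roughly equivalent to the module invocations recorded in the journal:

# Sketch of the cleanup sequence observed in the journal entries around this point.
- name: Stop and disable the Quadlet-generated service
  ansible.builtin.systemd_service:
    name: quadlet-demo.service
    state: stopped
    enabled: false

- name: Remove the Quadlet unit file
  ansible.builtin.file:
    path: /etc/containers/systemd/quadlet-demo.kube
    state: absent

- name: Drop the generated unit from systemd
  ansible.builtin.systemd_service:
    daemon_reload: true

- name: Prune images that are no longer referenced
  ansible.builtin.command: podman image prune --all -f
  changed_when: true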
Jan 04 11:35:16 managed-node2 podman[46173]: 2025-01-04 11:35:16.114362172 -0500 EST m=+0.278674830 container remove 99436d65d89af03f14af27dcb71779c19cd57b99f6b1b50d15c2382093ca4c08 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 04 11:35:16 managed-node2 podman[46173]: 2025-01-04 11:35:16.135208226 -0500 EST m=+0.299520887 container remove 23c3f79e2bc8df0b3cda71af6c29855118c1459ae9e282ca023ba08f99e48001 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 04 11:35:16 managed-node2 podman[46173]: 2025-01-04 11:35:16.161539512 -0500 EST m=+0.325852173 container remove 65b9ce253bc156b6f3d547f7ab92379b31509f467860bb1e1e8fb7619c76b9b3 (image=localhost/podman-pause:5.3.1-1733097600, name=56bbf7fc2aa4-infra, pod_id=56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Jan 04 11:35:16 managed-node2 systemd[1]: machine-libpod_pod_56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3.slice: Failed to open /run/systemd/transient/machine-libpod_pod_56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3.slice: No such file or directory Jan 04 11:35:16 managed-node2 podman[46173]: 2025-01-04 11:35:16.170546059 -0500 EST m=+0.334858692 pod remove 56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3 (image=, name=quadlet-demo) Jan 04 11:35:16 managed-node2 podman[46173]: 2025-01-04 11:35:16.197175279 -0500 EST m=+0.361487942 container remove f07164ee3e905df54a70f59e80d3960832e74eb9830cb83662de5ad55e2abf12 (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 04 11:35:16 managed-node2 quadlet-demo[46173]: Pods stopped: Jan 04 11:35:16 managed-node2 quadlet-demo[46173]: 56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3 Jan 04 11:35:16 managed-node2 quadlet-demo[46173]: Pods removed: Jan 04 11:35:16 managed-node2 quadlet-demo[46173]: 56bbf7fc2aa4ed9cd8fde8a56ecc4ec31f3bbcf984a11cbc924d60415bcfb0c3 Jan 04 11:35:16 managed-node2 quadlet-demo[46173]: Secrets removed: Jan 04 11:35:16 managed-node2 quadlet-demo[46173]: Volumes removed: Jan 04 11:35:16 managed-node2 systemd[1]: quadlet-demo.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-demo.service has successfully entered the 'dead' state. Jan 04 11:35:16 managed-node2 systemd[1]: Stopped quadlet-demo.service. ░░ Subject: A stop job for unit quadlet-demo.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit quadlet-demo.service has finished. ░░ ░░ The job identifier is 3531 and the job result is done. Jan 04 11:35:16 managed-node2 python3.12[46350]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo.kube follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:35:16 managed-node2 systemd[1]: var-lib-containers-storage-overlay-a4572646436e0dd3cca7f96a845994776ec9f38ff407f4b81ed14bcf47a15866-merged.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-a4572646436e0dd3cca7f96a845994776ec9f38ff407f4b81ed14bcf47a15866-merged.mount has successfully entered the 'dead' state. Jan 04 11:35:17 managed-node2 python3.12[46614]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo.kube state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:35:18 managed-node2 python3.12[46745]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 04 11:35:18 managed-node2 systemd[1]: Reload requested from client PID 46746 ('systemctl') (unit session-5.scope)... Jan 04 11:35:18 managed-node2 systemd[1]: Reloading... Jan 04 11:35:18 managed-node2 systemd[1]: Reloading finished in 225 ms. Jan 04 11:35:19 managed-node2 python3.12[46932]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:35:19 managed-node2 podman[46933]: 2025-01-04 11:35:19.617138378 -0500 EST m=+0.560260691 image untag fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache Jan 04 11:35:19 managed-node2 podman[46933]: 2025-01-04 11:35:19.075568356 -0500 EST m=+0.018690752 image remove fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b Jan 04 11:35:19 managed-node2 podman[46933]: 2025-01-04 11:35:19.74130553 -0500 EST m=+0.684427846 image untag 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d quay.io/linux-system-roles/envoyproxy:v1.25.0 Jan 04 11:35:19 managed-node2 podman[46933]: 2025-01-04 11:35:19.617148134 -0500 EST m=+0.560270405 image remove 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d Jan 04 11:35:20 managed-node2 python3.12[47071]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:35:20 managed-node2 python3.12[47210]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:35:21 managed-node2 python3.12[47349]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:35:21 managed-node2 python3.12[47488]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:35:23 managed-node2 python3.12[47903]: ansible-service_facts Invoked Jan 04 11:35:25 managed-node2 python3.12[48141]: 
ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:35:25 managed-node2 podman[48142]: 2025-01-04 11:35:25.844986577 -0500 EST m=+0.062457142 container health_status 1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, health_status=healthy, health_failing_streak=1, health_log=, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 04 11:35:25 managed-node2 systemd[1]: 1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e-1404942fa2db9863.service: Main process exited, code=exited, status=125/n/a ░░ Subject: Unit process exited ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ An ExecStart= process belonging to unit 1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e-1404942fa2db9863.service has exited. ░░ ░░ The process' exit code is 'exited' and its exit status is 125. Jan 04 11:35:25 managed-node2 systemd[1]: 1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e-1404942fa2db9863.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit 1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e-1404942fa2db9863.service has entered the 'failed' state with result 'exit-code'. Jan 04 11:35:27 managed-node2 python3.12[48283]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:35:28 managed-node2 python3.12[48547]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:35:28 managed-node2 python3.12[48678]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 04 11:35:28 managed-node2 systemd[1]: Reload requested from client PID 48679 ('systemctl') (unit session-5.scope)... Jan 04 11:35:28 managed-node2 systemd[1]: Reloading... Jan 04 11:35:28 managed-node2 systemd[1]: Reloading finished in 228 ms. 
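The records above show the cleanup pattern the role applies to each quadlet artifact: stat the file under /etc/containers/systemd, remove it with the file module, then reload systemd so the quadlet generator drops the stale unit. (The transient healthcheck unit exiting with status 125 here is logged but does not stop the cleanup.) A minimal sketch of that pattern, using the same module parameters seen in the journal entries (task names are illustrative; the file path is the one from this run):

- name: Remove the quadlet Kubernetes YAML (illustrative sketch)
  ansible.builtin.file:
    path: /etc/containers/systemd/quadlet-demo.yml
    state: absent

- name: Reload systemd so the generated unit goes away
  ansible.builtin.systemd:
    daemon_reload: true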
Jan 04 11:35:29 managed-node2 podman[48867]: 2025-01-04 11:35:29.475126237 -0500 EST m=+0.025386240 volume remove envoy-proxy-config Jan 04 11:35:29 managed-node2 podman[49006]: 2025-01-04 11:35:29.865103759 -0500 EST m=+0.023188673 volume remove envoy-certificates Jan 04 11:35:30 managed-node2 podman[49145]: 2025-01-04 11:35:30.284813415 -0500 EST m=+0.057802837 volume remove wp-pv-claim Jan 04 11:35:30 managed-node2 python3.12[49283]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:35:31 managed-node2 python3.12[49421]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:35:31 managed-node2 python3.12[49560]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:35:32 managed-node2 python3.12[49698]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:35:32 managed-node2 python3.12[49837]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:35:34 managed-node2 python3.12[50252]: ansible-service_facts Invoked Jan 04 11:35:36 managed-node2 python3.12[50490]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:35:38 managed-node2 python3.12[50623]: ansible-stat Invoked with path=/etc/containers/systemd/envoy-proxy-configmap.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:35:39 managed-node2 python3.12[50887]: ansible-file Invoked with path=/etc/containers/systemd/envoy-proxy-configmap.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:35:39 managed-node2 python3.12[51018]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 04 11:35:39 managed-node2 systemd[1]: Reload requested from client PID 51019 ('systemctl') (unit session-5.scope)... Jan 04 11:35:39 managed-node2 systemd[1]: Reloading... Jan 04 11:35:40 managed-node2 systemd[1]: Reloading finished in 223 ms. 
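After each removal above, the role prunes unused images (podman image prune --all -f) and then verifies the host state with read-only listings: podman images -n, podman volume ls -n, podman ps --noheading, and podman network ls -n -q. Expressed as plain command tasks, a sketch of those checks could look like this (variable and task names are illustrative):

- name: List remaining images (illustrative sketch)
  ansible.builtin.command: podman images -n
  register: __images
  changed_when: false

- name: List remaining volumes
  ansible.builtin.command: podman volume ls -n
  register: __volumes
  changed_when: false

- name: List running containers
  ansible.builtin.command: podman ps --noheading
  register: __containers
  changed_when: false

- name: List networks
  ansible.builtin.command: podman network ls -n -q
  register: __networks
  changed_when: false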
Jan 04 11:35:40 managed-node2 python3.12[51205]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:35:41 managed-node2 python3.12[51343]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:35:41 managed-node2 python3.12[51481]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:35:42 managed-node2 python3.12[51620]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:35:42 managed-node2 python3.12[51759]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:35:43 managed-node2 python3.12[52175]: ansible-service_facts Invoked Jan 04 11:35:46 managed-node2 python3.12[52413]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:35:48 managed-node2 python3.12[52546]: ansible-systemd Invoked with name=quadlet-demo-mysql.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jan 04 11:35:48 managed-node2 systemd[1]: Reload requested from client PID 52549 ('systemctl') (unit session-5.scope)... Jan 04 11:35:48 managed-node2 systemd[1]: Reloading... Jan 04 11:35:48 managed-node2 systemd[1]: Reloading finished in 226 ms. Jan 04 11:35:48 managed-node2 systemd[1]: Stopping quadlet-demo-mysql.service... ░░ Subject: A stop job for unit quadlet-demo-mysql.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit quadlet-demo-mysql.service has begun execution. ░░ ░░ The job identifier is 3612. Jan 04 11:35:50 managed-node2 podman[52607]: 2025-01-04 11:35:50.105081509 -0500 EST m=+1.464830960 container died 1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 04 11:35:50 managed-node2 systemd[1]: 1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e-1404942fa2db9863.timer: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit 1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e-1404942fa2db9863.timer has successfully entered the 'dead' state. Jan 04 11:35:50 managed-node2 systemd[1]: Stopped 1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e-1404942fa2db9863.timer - /usr/bin/podman healthcheck run 1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e. 
░░ Subject: A stop job for unit 1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e-1404942fa2db9863.timer has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit 1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e-1404942fa2db9863.timer has finished. ░░ ░░ The job identifier is 3613 and the job result is done. Jan 04 11:35:50 managed-node2 kernel: podman2: port 1(veth2) entered disabled state Jan 04 11:35:50 managed-node2 kernel: veth2 (unregistering): left allmulticast mode Jan 04 11:35:50 managed-node2 kernel: veth2 (unregistering): left promiscuous mode Jan 04 11:35:50 managed-node2 kernel: podman2: port 1(veth2) entered disabled state Jan 04 11:35:50 managed-node2 NetworkManager[784]: [1736008550.1602] device (podman2): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jan 04 11:35:50 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 3616. Jan 04 11:35:50 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 3616. Jan 04 11:35:50 managed-node2 systemd[1]: run-netns-netns\x2d5e539bdd\x2d6fad\x2db3ff\x2dcba9\x2d50ecc0131efc.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2d5e539bdd\x2d6fad\x2db3ff\x2dcba9\x2d50ecc0131efc.mount has successfully entered the 'dead' state. Jan 04 11:35:50 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e-userdata-shm.mount has successfully entered the 'dead' state. Jan 04 11:35:50 managed-node2 systemd[1]: var-lib-containers-storage-overlay-f2934737174641a5bbb7ab80e47d21bb70dfbb1e1676aa4b57a65dc9936d4a85-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-f2934737174641a5bbb7ab80e47d21bb70dfbb1e1676aa4b57a65dc9936d4a85-merged.mount has successfully entered the 'dead' state. Jan 04 11:35:50 managed-node2 podman[52607]: 2025-01-04 11:35:50.260530073 -0500 EST m=+1.620279239 container remove 1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 04 11:35:50 managed-node2 quadlet-demo-mysql[52607]: 1ce48534b65f833b19838c37c2ca5368b668e9a68f0d818e917cf94a8465c20e Jan 04 11:35:50 managed-node2 systemd[1]: quadlet-demo-mysql.service: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-demo-mysql.service has successfully entered the 'dead' state. Jan 04 11:35:50 managed-node2 systemd[1]: Stopped quadlet-demo-mysql.service. ░░ Subject: A stop job for unit quadlet-demo-mysql.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit quadlet-demo-mysql.service has finished. ░░ ░░ The job identifier is 3612 and the job result is done. Jan 04 11:35:50 managed-node2 python3.12[52787]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:35:51 managed-node2 python3.12[53051]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.container state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:35:52 managed-node2 python3.12[53182]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 04 11:35:52 managed-node2 systemd[1]: Reload requested from client PID 53183 ('systemctl') (unit session-5.scope)... Jan 04 11:35:52 managed-node2 systemd[1]: Reloading... Jan 04 11:35:52 managed-node2 systemd[1]: Reloading finished in 218 ms. Jan 04 11:35:53 managed-node2 python3.12[53509]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:35:53 managed-node2 podman[53510]: 2025-01-04 11:35:53.994825528 -0500 EST m=+0.206309108 image untag dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6 Jan 04 11:35:53 managed-node2 podman[53510]: 2025-01-04 11:35:53.804940168 -0500 EST m=+0.016423911 image remove dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 Jan 04 11:35:54 managed-node2 python3.12[53647]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:35:55 managed-node2 python3.12[53786]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:35:55 managed-node2 python3.12[53924]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:35:55 managed-node2 python3.12[54062]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:35:57 managed-node2 python3.12[54479]: 
ansible-service_facts Invoked Jan 04 11:36:00 managed-node2 python3.12[54716]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:36:00 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Jan 04 11:36:01 managed-node2 python3.12[54850]: ansible-systemd Invoked with name=quadlet-demo-mysql-volume.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jan 04 11:36:01 managed-node2 systemd[1]: Reload requested from client PID 54853 ('systemctl') (unit session-5.scope)... Jan 04 11:36:01 managed-node2 systemd[1]: Reloading... Jan 04 11:36:01 managed-node2 systemd[1]: Reloading finished in 215 ms. Jan 04 11:36:01 managed-node2 systemd[1]: quadlet-demo-mysql-volume.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-demo-mysql-volume.service has successfully entered the 'dead' state. Jan 04 11:36:01 managed-node2 systemd[1]: Stopped quadlet-demo-mysql-volume.service. ░░ Subject: A stop job for unit quadlet-demo-mysql-volume.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit quadlet-demo-mysql-volume.service has finished. ░░ ░░ The job identifier is 3695 and the job result is done. Jan 04 11:36:02 managed-node2 python3.12[55041]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.volume follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:36:03 managed-node2 python3.12[55305]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.volume state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:36:03 managed-node2 python3.12[55436]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 04 11:36:03 managed-node2 systemd[1]: Reload requested from client PID 55437 ('systemctl') (unit session-5.scope)... Jan 04 11:36:03 managed-node2 systemd[1]: Reloading... Jan 04 11:36:03 managed-node2 systemd[1]: Reloading finished in 209 ms. 
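The records above apply the same pattern to the MySQL volume: the .volume quadlet file generates a matching quadlet-demo-mysql-volume.service unit, so cleanup is "stop the unit, delete the file, reload systemd". A minimal sketch (task names are illustrative; unit and file names are from this run):

- name: Stop the generated volume unit (illustrative sketch)
  ansible.builtin.systemd:
    name: quadlet-demo-mysql-volume.service
    scope: system
    state: stopped
    enabled: false
    force: true

- name: Remove the quadlet volume definition
  ansible.builtin.file:
    path: /etc/containers/systemd/quadlet-demo-mysql.volume
    state: absent

- name: Reload systemd units
  ansible.builtin.systemd:
    daemon_reload: true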
Jan 04 11:36:04 managed-node2 podman[55624]: 2025-01-04 11:36:04.357487744 -0500 EST m=+0.025230093 volume remove systemd-quadlet-demo-mysql Jan 04 11:36:04 managed-node2 python3.12[55761]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:36:05 managed-node2 python3.12[55900]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:36:05 managed-node2 python3.12[56038]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:36:06 managed-node2 python3.12[56176]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:36:06 managed-node2 python3.12[56315]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:36:07 managed-node2 python3.12[56731]: ansible-service_facts Invoked Jan 04 11:36:10 managed-node2 python3.12[56969]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:36:11 managed-node2 python3.12[57102]: ansible-systemd Invoked with name=quadlet-demo-network.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jan 04 11:36:11 managed-node2 systemd[1]: Reload requested from client PID 57105 ('systemctl') (unit session-5.scope)... Jan 04 11:36:11 managed-node2 systemd[1]: Reloading... Jan 04 11:36:11 managed-node2 systemd[1]: Reloading finished in 209 ms. Jan 04 11:36:11 managed-node2 systemd[1]: quadlet-demo-network.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-demo-network.service has successfully entered the 'dead' state. Jan 04 11:36:11 managed-node2 systemd[1]: Stopped quadlet-demo-network.service. ░░ Subject: A stop job for unit quadlet-demo-network.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit quadlet-demo-network.service has finished. ░░ ░░ The job identifier is 3696 and the job result is done. 
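Note in the records above that stopping quadlet-demo-mysql-volume.service did not delete the underlying data: the backing volume (note the systemd- prefix the quadlet generator adds, systemd-quadlet-demo-mysql) is removed in a separate step. A direct standalone equivalent, assuming the volume still exists (task name is illustrative):

- name: Remove the leftover quadlet-managed volume (illustrative sketch)
  ansible.builtin.command: podman volume rm systemd-quadlet-demo-mysql
  # Note: this plain command fails if the volume has already been removed.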
Jan 04 11:36:12 managed-node2 python3.12[57293]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo.network follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 04 11:36:13 managed-node2 python3.12[57557]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo.network state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 04 11:36:13 managed-node2 python3.12[57688]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 04 11:36:13 managed-node2 systemd[1]: Reload requested from client PID 57689 ('systemctl') (unit session-5.scope)... Jan 04 11:36:13 managed-node2 systemd[1]: Reloading... Jan 04 11:36:14 managed-node2 systemd[1]: Reloading finished in 213 ms. Jan 04 11:36:14 managed-node2 systemd[1]: Starting fstrim.service - Discard unused blocks on filesystems from /etc/fstab... ░░ Subject: A start job for unit fstrim.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit fstrim.service has begun execution. ░░ ░░ The job identifier is 3697. Jan 04 11:36:14 managed-node2 systemd[1]: fstrim.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit fstrim.service has successfully entered the 'dead' state. Jan 04 11:36:14 managed-node2 systemd[1]: Finished fstrim.service - Discard unused blocks on filesystems from /etc/fstab. ░░ Subject: A start job for unit fstrim.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit fstrim.service has finished successfully. ░░ ░░ The job identifier is 3697. 
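With the .network quadlet file removed and systemd reloaded (the fstrim.service run in between is unrelated to the cleanup), the role finishes with a debugging pass over the remaining units. The one-liner packed into _raw_params in the 11:36:20 journal entry below is easier to read written out as a shell task; a sketch (only the task name is illustrative):

- name: For testing and debugging - list quadlet units (illustrative sketch)
  ansible.builtin.shell: |
    exec 1>&2        # send all output to stderr
    set -x           # trace each command
    set -o pipefail
    systemctl list-units --plain -l --all | grep quadlet || :
    systemctl list-unit-files --all | grep quadlet || :
    systemctl list-units --plain --failed -l --all | grep quadlet || :
  changed_when: false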
Jan 04 11:36:15 managed-node2 python3.12[58016]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:36:15 managed-node2 python3.12[58154]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:36:16 managed-node2 python3.12[58293]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:36:16 managed-node2 python3.12[58431]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:36:16 managed-node2 python3.12[58570]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:36:18 managed-node2 python3.12[58986]: ansible-service_facts Invoked Jan 04 11:36:20 managed-node2 python3.12[59224]: ansible-ansible.legacy.command Invoked with _raw_params=exec 1>&2 set -x set -o pipefail systemctl list-units --plain -l --all | grep quadlet || : systemctl list-unit-files --all | grep quadlet || : systemctl list-units --plain --failed -l --all | grep quadlet || : _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 04 11:36:21 managed-node2 python3.12[59362]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None

PLAY RECAP *********************************************************************
managed-node2 : ok=397 changed=47 unreachable=0 failed=2 skipped=376 rescued=2 ignored=0

TASKS RECAP ********************************************************************
Saturday 04 January 2025 11:36:21 -0500 (0:00:00.519) 0:03:17.487 ******
===============================================================================
Check web -------------------------------------------------------------- 33.36s
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:121
fedora.linux_system_roles.podman : Ensure container images are present -- 17.04s
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
fedora.linux_system_roles.podman : Ensure container images are present --- 6.59s
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
fedora.linux_system_roles.certificate : Ensure provider packages are installed --- 2.85s
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:23
fedora.linux_system_roles.podman : Stop and disable service ------------- 2.55s
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
fedora.linux_system_roles.certificate : Ensure certificate role dependencies are installed --- 2.23s
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:5
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.19s
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.16s
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 1.95s
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 1.95s
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 1.89s
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 1.87s
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
Gathering Facts --------------------------------------------------------- 1.37s
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:9
fedora.linux_system_roles.certificate : Ensure provider service is running --- 1.31s
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:90
fedora.linux_system_roles.podman : Stop and disable service ------------- 1.27s
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
fedora.linux_system_roles.podman : Remove volumes ----------------------- 1.25s
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99
fedora.linux_system_roles.certificate : Slurp the contents of the files --- 1.20s
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:152
fedora.linux_system_roles.podman : Gather the package facts ------------- 1.15s
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.podman : Start service ------------------------ 1.14s
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
fedora.linux_system_roles.firewall : Configure firewall ----------------- 1.12s
/tmp/collections-nXs/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71
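The recap above closes the run: the timing summary shows the "Check web" task and the container image pulls dominating the 0:03:17 wall time, with each cleanup step staying under a few seconds. As a final sanity check after such a cleanup, a standalone task along these lines could assert that nothing is left running (illustrative only, not part of this test):

- name: Verify no containers are left running (illustrative sketch)
  ansible.builtin.command: podman ps --noheading
  register: __podman_ps
  changed_when: false
  failed_when: __podman_ps.stdout | trim | length > 0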