ansible-playbook [core 2.17.5]
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.12/site-packages/ansible
  ansible collection location = /tmp/collections-jiT
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.12.6 (main, Sep 9 2024, 00:00:00) [GCC 11.5.0 20240719 (Red Hat 11.5.0-2)] (/usr/bin/python3.12)
  jinja version = 3.1.4
  libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.profile_tasks to ansible.posix.profile_tasks
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_share_system_dir.yml *******************************************
1 plays in /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml

PLAY [Ensure that the role can share tangd.socket.d directory with other files] ***

TASK [Create the tangd.socket.d directory] *************************************
task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml:14
Saturday 12 October 2024 13:39:59 -0400 (0:00:00.015) 0:00:00.015 ******
[WARNING]: Platform linux on host managed-node3 is using the discovered Python
interpreter at /usr/bin/python3.9, but future installation of another Python
interpreter could change the meaning of that path. See
https://docs.ansible.com/ansible-core/2.17/reference_appendices/interpreter_discovery.html
for more information.
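For reference, the two setup tasks at tests_share_system_dir.yml:14 and :20 plausibly look like the sketch below, reconstructed from the task results that follow; this is not the verbatim test source. The module choices are assumptions, and the drop-in file's actual content is not shown in the log (only its path, mode, 28-byte size, and checksum are recorded):

    - name: Create the tangd.socket.d directory
      ansible.builtin.file:
        path: /etc/systemd/system/tangd.socket.d
        state: directory
        mode: "0775"

    - name: Create a customization systemd file
      ansible.builtin.copy:
        content: "..."  # actual drop-in content not recorded in this log
        dest: /etc/systemd/system/tangd.socket.d/override2.conf
        mode: "0664"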
changed: [managed-node3] => { "ansible_facts": { "discovered_interpreter_python": "/usr/bin/python3.9" }, "changed": true, "gid": 0, "group": "root", "mode": "0775", "owner": "root", "path": "/etc/systemd/system/tangd.socket.d", "secontext": "unconfined_u:object_r:systemd_unit_file_t:s0", "size": 6, "state": "directory", "uid": 0 }

TASK [Create a customization systemd file] *************************************
task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml:20
Saturday 12 October 2024 13:39:59 -0400 (0:00:00.689) 0:00:00.704 ******
changed: [managed-node3] => { "changed": true, "checksum": "05987691cc309e84627f31fa0d1680a3b3b2c4b2", "dest": "/etc/systemd/system/tangd.socket.d/override2.conf", "gid": 0, "group": "root", "md5sum": "fb9de2e8557683271457053efbe78252", "mode": "0664", "owner": "root", "secontext": "system_u:object_r:tangd_unit_file_t:s0", "size": 28, "src": "/root/.ansible/tmp/ansible-tmp-1728754799.7418828-10128-106422745829600/.source.conf", "state": "file", "uid": 0 }

TASK [Run role] ****************************************************************
task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml:28
Saturday 12 October 2024 13:40:00 -0400 (0:00:00.885) 0:00:01.590 ******
included: fedora.linux_system_roles.nbde_server for managed-node3

TASK [fedora.linux_system_roles.nbde_server : Set version specific variables] ***
task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main.yml:6
Saturday 12 October 2024 13:40:00 -0400 (0:00:00.044) 0:00:01.634 ******
included: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml for managed-node3

TASK [fedora.linux_system_roles.nbde_server : Ensure ansible_facts used by role] ***
task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:2
Saturday 12 October 2024 13:40:00 -0400 (0:00:00.025) 0:00:01.660 ******
ok: [managed-node3]

TASK [fedora.linux_system_roles.nbde_server : Check if system is ostree] *******
task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:10
Saturday 12 October 2024 13:40:01 -0400 (0:00:00.770) 0:00:02.431 ******
ok: [managed-node3] => { "changed": false, "stat": { "exists": false } }

TASK [fedora.linux_system_roles.nbde_server : Set flag to indicate system is ostree] ***
task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:15
Saturday 12 October 2024 13:40:01 -0400 (0:00:00.452) 0:00:02.883 ******
ok: [managed-node3] => { "ansible_facts": { "__nbde_server_is_ostree": false }, "changed": false }

TASK [fedora.linux_system_roles.nbde_server : Set platform/version specific variables] ***
task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:19
Saturday 12 October 2024 13:40:01 -0400 (0:00:00.041) 0:00:02.925 ******
ok: [managed-node3] => { "ansible_facts": { "__nbde_server_cachedir": "/var/cache/tang", "__nbde_server_group": "tang", "__nbde_server_keydir": "/var/db/tang", "__nbde_server_keygen": "/usr/libexec/tangd-keygen", "__nbde_server_packages": [ "tang" ], "__nbde_server_services": [ "tangd.socket" ], "__nbde_server_update": "/usr/libexec/tangd-update", "__nbde_server_user": "tang" }, "ansible_included_var_files": [
"/tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/vars/default.yml" ], "changed": false } TASK [fedora.linux_system_roles.nbde_server : Include the appropriate provider tasks] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main.yml:9 Saturday 12 October 2024 13:40:01 -0400 (0:00:00.079) 0:00:03.005 ****** included: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml for managed-node3 TASK [fedora.linux_system_roles.nbde_server : Ensure tang is installed] ******** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:2 Saturday 12 October 2024 13:40:02 -0400 (0:00:00.090) 0:00:03.095 ****** ok: [managed-node3] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: tang TASK [fedora.linux_system_roles.nbde_server : Ensure keys are rotated] ********* task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:8 Saturday 12 October 2024 13:40:03 -0400 (0:00:01.805) 0:00:04.901 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "nbde_server_rotate_keys | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Ensure we have keys] ************* task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:17 Saturday 12 October 2024 13:40:03 -0400 (0:00:00.048) 0:00:04.949 ****** changed: [managed-node3] => { "arguments": { "cachedir": "/var/cache/tang", "force": false, "keydir": "/var/db/tang", "keygen": "/usr/libexec/tangd-keygen", "keys_to_deploy_dir": null, "state": "keys-created", "update": "/usr/libexec/tangd-update" }, "changed": true, "state": "keys-created" } TASK [fedora.linux_system_roles.nbde_server : Perform key management (fetch/deploy) tasks] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:26 Saturday 12 October 2024 13:40:04 -0400 (0:00:00.514) 0:00:05.464 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "(nbde_server_fetch_keys | bool) or (nbde_server_deploy_keys | bool)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Manage firewall and SELinux for port] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:30 Saturday 12 October 2024 13:40:04 -0400 (0:00:00.031) 0:00:05.495 ****** included: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml for managed-node3 TASK [Ensure tang port is labeled tangd_port_t for SELinux] ******************** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:2 Saturday 12 October 2024 13:40:04 -0400 (0:00:00.019) 0:00:05.515 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "nbde_server_manage_selinux | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Stat the tangd custom port systemd directory] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:14 Saturday 12 October 2024 13:40:04 -0400 (0:00:00.028) 0:00:05.544 ****** ok: [managed-node3] => { 
"changed": false, "stat": { "atime": 1728754799.6445153, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1728754800.5175111, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 658505922, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0775", "mtime": 1728754800.5175111, "nlink": 2, "path": "/etc/systemd/system/tangd.socket.d", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 28, "uid": 0, "version": "447156668", "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.nbde_server : Get a list of files in the tangd custom directory] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:19 Saturday 12 October 2024 13:40:04 -0400 (0:00:00.318) 0:00:05.862 ****** ok: [managed-node3] => { "changed": false, "examined": 1, "files": [ { "atime": 1728754800.512511, "ctime": 1728754800.518511, "dev": 51713, "gid": 0, "gr_name": "root", "inode": 662700227, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mode": "0664", "mtime": 1728754800.1615129, "nlink": 1, "path": "/etc/systemd/system/tangd.socket.d/override2.conf", "pw_name": "root", "rgrp": true, "roth": true, "rusr": true, "size": 28, "uid": 0, "wgrp": true, "woth": false, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } ], "matched": 1, "skipped_paths": {} } MSG: All paths examined TASK [fedora.linux_system_roles.nbde_server : Manage tangd custom port systemd directory] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:35 Saturday 12 October 2024 13:40:05 -0400 (0:00:00.410) 0:00:06.273 ****** ok: [managed-node3] => { "changed": false, "gid": 0, "group": "root", "mode": "0775", "owner": "root", "path": "/etc/systemd/system/tangd.socket.d", "secontext": "unconfined_u:object_r:systemd_unit_file_t:s0", "size": 28, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.nbde_server : Creates the file with the port entry that we want tangd to listen to] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:44 Saturday 12 October 2024 13:40:05 -0400 (0:00:00.362) 0:00:06.635 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "nbde_server_port | int != 80", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Set flag to to tell main that the port has changed] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:53 Saturday 12 October 2024 13:40:05 -0400 (0:00:00.030) 0:00:06.666 ****** ok: [managed-node3] => { "ansible_facts": { "__nbde_server_port_changed": false }, "changed": false } TASK [Ensure the desired port is added to firewalld] *************************** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:57 Saturday 12 October 2024 13:40:05 -0400 (0:00:00.021) 0:00:06.688 ****** skipping: [managed-node3] => { "changed": false, 
"false_condition": "nbde_server_manage_firewall | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Reload the daemons so the new changes take effect] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:34 Saturday 12 October 2024 13:40:05 -0400 (0:00:00.051) 0:00:06.739 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__nbde_server_port_changed | d(false) | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Ensure required services are enabled and at the right state] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:39 Saturday 12 October 2024 13:40:05 -0400 (0:00:00.057) 0:00:06.796 ****** ok: [managed-node3] => (item=tangd.socket) => { "ansible_loop_var": "item", "changed": false, "enabled": true, "item": "tangd.socket", "name": "tangd.socket", "state": "started", "status": { "Accept": "yes", "AccessSELinuxContext": "system_u:object_r:tangd_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2024-10-12 13:39:23 EDT", "ActiveEnterTimestampMonotonic": "368185276", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "sysinit.target system.slice systemd-journald.socket", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2024-10-12 13:39:23 EDT", "AssertTimestampMonotonic": "368174702", "Backlog": "4096", "Before": "sockets.target shutdown.target", "BindIPv6Only": "default", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "Broadcast": "no", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "4587000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "no", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2024-10-12 13:39:23 EDT", "ConditionTimestampMonotonic": "368174698", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/tangd.socket", "ControlGroupId": "4137", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DeferAcceptUSec": "0", "Delegate": "no", "Description": "Tang Server socket", "DevicePolicy": "auto", "DirectoryMode": "0755", "Documentation": "\"man:tang(8)\"", "DynamicUser": "no", "ExecStartPre": "{ path=/usr/bin/chown ; argv[]=/usr/bin/chown -R tang:tang /var/db/tang ; ignore_errors=yes ; start_time=[Sat 2024-10-12 13:39:23 EDT] ; stop_time=[Sat 2024-10-12 13:39:23 EDT] ; pid=7218 ; code=exited ; status=0 }", "FailureAction": 
"none", "FileDescriptorName": "tangd.socket", "FinalKillSignal": "9", "FlushPending": "no", "FragmentPath": "/usr/lib/systemd/system/tangd.socket", "FreeBind": "no", "FreezerState": "running", "GID": "[not set]", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "IPTOS": "-1", "IPTTL": "-1", "Id": "tangd.socket", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2024-10-12 13:39:23 EDT", "InactiveExitTimestampMonotonic": "368180181", "InvocationID": "727b3d2906354e64a2e19a2b74a21d62", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeepAlive": "no", "KeepAliveIntervalUSec": "0", "KeepAliveProbes": "0", "KeepAliveTimeUSec": "0", "KeyringMode": "shared", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13964", "LimitNPROCSoft": "13964", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13964", "LimitSIGPENDINGSoft": "13964", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "Listen": "[::]:80 (Stream)", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "Mark": "-1", "MaxConnections": "64", "MaxConnectionsPerSource": "0", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "8192", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MessageQueueMaxMessages": "0", "MessageQueueMessageSize": "0", "MountAPIVFS": "no", "NAccepted": "0", "NConnections": "0", "NRefused": "0", "NUMAPolicy": "n/a", "Names": "tangd.socket", "NeedDaemonReload": "no", "Nice": "0", "NoDelay": "no", "NoNewPrivileges": "no", "NonBlocking": "no", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PassCredentials": "no", "PassPacketInfo": "no", "PassSecurity": "no", "Perpetual": "no", "PipeSize": "0", "Priority": "-1", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", 
"ProtectSystem": "no", "ReceiveBuffer": "0", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemoveIPC": "no", "RemoveOnStop": "no", "Requires": "system.slice sysinit.target", "RestartKillSignal": "15", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "ReusePort": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "SameProcessGroup": "no", "SecureBits": "0", "SendBuffer": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "SocketMode": "0666", "SocketProtocol": "0", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Sat 2024-10-12 13:39:23 EDT", "StateChangeTimestampMonotonic": "368185276", "StateDirectoryMode": "0755", "StopWhenUnneeded": "no", "SubState": "listening", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "0", "TasksMax": "22342", "TimeoutCleanUSec": "infinity", "TimeoutUSec": "1min 30s", "TimerSlackNSec": "50000", "Timestamping": "off", "Transient": "no", "Transparent": "no", "TriggerLimitBurst": "200", "TriggerLimitIntervalUSec": "2s", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "sockets.target", "WatchdogSignal": "6", "Writable": "no" } } TASK [Check tangd socket dir] ************************************************** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml:33 Saturday 12 October 2024 13:40:06 -0400 (0:00:01.090) 0:00:07.887 ****** ok: [managed-node3] => { "changed": false, "failed_when_result": false, "stat": { "atime": 1728754805.2174883, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1728754800.5175111, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 658505922, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0775", "mtime": 1728754800.5175111, "nlink": 2, "path": "/etc/systemd/system/tangd.socket.d", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 28, "uid": 0, "version": "447156668", "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [Check custom file] ******************************************************* task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml:39 Saturday 12 October 2024 13:40:07 -0400 (0:00:00.317) 0:00:08.205 ****** ok: [managed-node3] => { "changed": false, "failed_when_result": false, "stat": { "atime": 1728754806.8214805, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "05987691cc309e84627f31fa0d1680a3b3b2c4b2", "ctime": 1728754800.518511, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", 
"inode": 662700227, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0664", "mtime": 1728754800.1615129, "nlink": 1, "path": "/etc/systemd/system/tangd.socket.d/override2.conf", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 28, "uid": 0, "version": "3714202560", "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Verify role reported no changes] ***************************************** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml:45 Saturday 12 October 2024 13:40:08 -0400 (0:00:00.867) 0:00:09.073 ****** ok: [managed-node3] => { "changed": false } MSG: All assertions passed TASK [Run the role with a custom port] ***************************************** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml:49 Saturday 12 October 2024 13:40:08 -0400 (0:00:00.017) 0:00:09.090 ****** included: fedora.linux_system_roles.nbde_server for managed-node3 TASK [fedora.linux_system_roles.nbde_server : Set version specific variables] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main.yml:6 Saturday 12 October 2024 13:40:08 -0400 (0:00:00.026) 0:00:09.117 ****** included: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml for managed-node3 TASK [fedora.linux_system_roles.nbde_server : Ensure ansible_facts used by role] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:2 Saturday 12 October 2024 13:40:08 -0400 (0:00:00.017) 0:00:09.134 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__nbde_server_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Check if system is ostree] ******* task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:10 Saturday 12 October 2024 13:40:08 -0400 (0:00:00.038) 0:00:09.173 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __nbde_server_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Set flag to indicate system is ostree] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:15 Saturday 12 October 2024 13:40:08 -0400 (0:00:00.018) 0:00:09.191 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __nbde_server_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Set platform/version specific variables] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:19 Saturday 12 October 2024 13:40:08 -0400 (0:00:00.015) 0:00:09.207 ****** ok: [managed-node3] => { "ansible_facts": { "__nbde_server_cachedir": "/var/cache/tang", "__nbde_server_group": "tang", "__nbde_server_keydir": "/var/db/tang", "__nbde_server_keygen": "/usr/libexec/tangd-keygen", "__nbde_server_packages": [ "tang" ], "__nbde_server_services": [ 
"tangd.socket" ], "__nbde_server_update": "/usr/libexec/tangd-update", "__nbde_server_user": "tang" }, "ansible_included_var_files": [ "/tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/vars/default.yml" ], "changed": false } TASK [fedora.linux_system_roles.nbde_server : Include the appropriate provider tasks] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main.yml:9 Saturday 12 October 2024 13:40:08 -0400 (0:00:00.021) 0:00:09.228 ****** included: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml for managed-node3 TASK [fedora.linux_system_roles.nbde_server : Ensure tang is installed] ******** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:2 Saturday 12 October 2024 13:40:08 -0400 (0:00:00.020) 0:00:09.249 ****** ok: [managed-node3] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: tang TASK [fedora.linux_system_roles.nbde_server : Ensure keys are rotated] ********* task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:8 Saturday 12 October 2024 13:40:09 -0400 (0:00:01.566) 0:00:10.816 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "nbde_server_rotate_keys | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Ensure we have keys] ************* task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:17 Saturday 12 October 2024 13:40:09 -0400 (0:00:00.040) 0:00:10.857 ****** ok: [managed-node3] => { "arguments": { "cachedir": "/var/cache/tang", "force": false, "keydir": "/var/db/tang", "keygen": "/usr/libexec/tangd-keygen", "keys_to_deploy_dir": null, "state": "keys-created", "update": "/usr/libexec/tangd-update" }, "changed": false, "state": "keys-created" } TASK [fedora.linux_system_roles.nbde_server : Perform key management (fetch/deploy) tasks] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:26 Saturday 12 October 2024 13:40:10 -0400 (0:00:00.355) 0:00:11.212 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "(nbde_server_fetch_keys | bool) or (nbde_server_deploy_keys | bool)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Manage firewall and SELinux for port] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:30 Saturday 12 October 2024 13:40:10 -0400 (0:00:00.029) 0:00:11.241 ****** included: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml for managed-node3 TASK [Ensure tang port is labeled tangd_port_t for SELinux] ******************** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:2 Saturday 12 October 2024 13:40:10 -0400 (0:00:00.019) 0:00:11.261 ****** redirecting (type: modules) ansible.builtin.selinux to ansible.posix.selinux redirecting (type: modules) ansible.builtin.selinux to ansible.posix.selinux redirecting (type: modules) ansible.builtin.seboolean to ansible.posix.seboolean included: fedora.linux_system_roles.selinux for managed-node3 TASK [fedora.linux_system_roles.selinux : Set 
ansible_facts required by role and install packages] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:2 Saturday 12 October 2024 13:40:10 -0400 (0:00:00.067) 0:00:11.328 ****** included: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml for managed-node3 TASK [fedora.linux_system_roles.selinux : Ensure ansible_facts used by role] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:2 Saturday 12 October 2024 13:40:10 -0400 (0:00:00.024) 0:00:11.352 ****** ok: [managed-node3] TASK [fedora.linux_system_roles.selinux : Ensure SELinux packages] ************* task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/set_facts_packages.yml:7 Saturday 12 October 2024 13:40:10 -0400 (0:00:00.462) 0:00:11.815 ****** included: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml for managed-node3 TASK [fedora.linux_system_roles.selinux : Check if system is ostree] *********** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:5 Saturday 12 October 2024 13:40:10 -0400 (0:00:00.039) 0:00:11.855 ****** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.selinux : Set flag to indicate system is ostree] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:10 Saturday 12 October 2024 13:40:11 -0400 (0:00:00.345) 0:00:12.201 ****** ok: [managed-node3] => { "ansible_facts": { "__selinux_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.selinux : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:17 Saturday 12 October 2024 13:40:11 -0400 (0:00:00.021) 0:00:12.223 ****** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.selinux : Set flag if transactional-update exists] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:22 Saturday 12 October 2024 13:40:11 -0400 (0:00:00.319) 0:00:12.542 ****** ok: [managed-node3] => { "ansible_facts": { "__selinux_is_transactional": false }, "changed": false } TASK [fedora.linux_system_roles.selinux : Install SELinux python2 tools] ******* task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:26 Saturday 12 October 2024 13:40:11 -0400 (0:00:00.020) 0:00:12.563 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "ansible_python_version is version('3', '<')", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] ******* task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:35 Saturday 12 October 2024 13:40:11 -0400 (0:00:00.015) 0:00:12.578 ****** ok: [managed-node3] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: python3-libselinux python3-policycoreutils TASK [fedora.linux_system_roles.selinux : Install SELinux python3 tools] ******* task path: 
/tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:46 Saturday 12 October 2024 13:40:13 -0400 (0:00:01.596) 0:00:14.175 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "ansible_os_family == \"Suse\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Install SELinux tool semanage] ******* task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:58 Saturday 12 October 2024 13:40:13 -0400 (0:00:00.016) 0:00:14.191 ****** changed: [managed-node3] => { "changed": true, "rc": 0, "results": [ "Installed: policycoreutils-python-utils-3.6-2.1.el9.noarch" ] } lsrpackages: policycoreutils-python-utils TASK [fedora.linux_system_roles.selinux : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:72 Saturday 12 October 2024 13:40:15 -0400 (0:00:02.330) 0:00:16.522 ****** skipping: [managed-node3] => { "false_condition": "__selinux_is_transactional | d(false)" } TASK [fedora.linux_system_roles.selinux : Reboot transactional update systems] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:77 Saturday 12 October 2024 13:40:15 -0400 (0:00:00.034) 0:00:16.557 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__selinux_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Fail if reboot is needed and not set] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:82 Saturday 12 October 2024 13:40:15 -0400 (0:00:00.031) 0:00:16.589 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__selinux_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Refresh facts] *********************** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:89 Saturday 12 October 2024 13:40:15 -0400 (0:00:00.030) 0:00:16.619 ****** ok: [managed-node3] TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if enabled] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:5 Saturday 12 October 2024 13:40:16 -0400 (0:00:00.732) 0:00:17.352 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "ansible_selinux.status == \"enabled\" and (selinux_state or selinux_policy)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set permanent SELinux state if disabled] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:13 Saturday 12 October 2024 13:40:16 -0400 (0:00:00.012) 0:00:17.364 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "ansible_selinux.status == \"disabled\" and selinux_state", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Set selinux_reboot_required] ********* task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:21 Saturday 12 October 2024 13:40:16 -0400 (0:00:00.011) 0:00:17.376 ****** 
ok: [managed-node3] => { "ansible_facts": { "selinux_reboot_required": false }, "changed": false }

TASK [fedora.linux_system_roles.selinux : Fail if reboot is required] **********
task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:25
Saturday 12 October 2024 13:40:16 -0400 (0:00:00.037) 0:00:17.414 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "selinux_reboot_required", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Warn if SELinux is disabled] *********
task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:30
Saturday 12 October 2024 13:40:16 -0400 (0:00:00.011) 0:00:17.426 ******
skipping: [managed-node3] => { "false_condition": "ansible_selinux.status == \"disabled\"" }

TASK [fedora.linux_system_roles.selinux : Drop all local modifications] ********
task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:35
Saturday 12 October 2024 13:40:16 -0400 (0:00:00.011) 0:00:17.438 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "selinux_all_purge | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Purge all SELinux boolean local modifications] ***
task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:43
Saturday 12 October 2024 13:40:16 -0400 (0:00:00.029) 0:00:17.467 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "selinux_booleans_purge | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Purge all SELinux file context local modifications] ***
task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:48
Saturday 12 October 2024 13:40:16 -0400 (0:00:00.029) 0:00:17.497 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "selinux_fcontexts_purge | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Purge all SELinux port local modifications] ***
task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:53
Saturday 12 October 2024 13:40:16 -0400 (0:00:00.030) 0:00:17.527 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "selinux_ports_purge | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Purge all SELinux login local modifications] ***
task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:58
Saturday 12 October 2024 13:40:16 -0400 (0:00:00.031) 0:00:17.558 ******
skipping: [managed-node3] => { "changed": false, "false_condition": "selinux_logins_purge | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.selinux : Set SELinux booleans] ****************
task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:63
Saturday 12 October 2024 13:40:16 -0400 (0:00:00.031) 0:00:17.590 ******
skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" }

TASK [fedora.linux_system_roles.selinux : Set SELinux file contexts] ***********
task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:74
Saturday 12 October 2024 13:40:16 -0400 (0:00:00.011) 0:00:17.601 ******
skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" }

TASK [fedora.linux_system_roles.selinux : Set an SELinux label on a port] ******
task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:87
Saturday 12 October 2024 13:40:16 -0400 (0:00:00.013) 0:00:17.615 ******
changed: [managed-node3] => (item={'ports': 7500, 'proto': 'tcp', 'setype': 'tangd_port_t', 'state': 'present', 'local': True}) => { "__selinux_item": { "local": true, "ports": 7500, "proto": "tcp", "setype": "tangd_port_t", "state": "present" }, "ansible_loop_var": "__selinux_item", "changed": true, "ports": [ "7500" ], "proto": "tcp", "setype": "tangd_port_t", "state": "present" }

TASK [fedora.linux_system_roles.selinux : Set linux user to SELinux user mapping] ***
task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:99
Saturday 12 October 2024 13:40:22 -0400 (0:00:05.649) 0:00:23.264 ******
skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" }

TASK [fedora.linux_system_roles.selinux : Get SELinux modules facts] ***********
task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:112
Saturday 12 October 2024 13:40:22 -0400 (0:00:00.011) 0:00:23.276 ******
ok: [managed-node3] => { "ansible_facts": { "selinux_checksums": true, "selinux_installed_modules": { "abrt": { "100": { "checksum": "sha256:b51e3b6f704ac00c5bba8ac275f854f6d53a5e3a4eb5c5cbbc4b11004feca510", "enabled": 1 } }, "accountsd": { "100": { "checksum": "sha256:00a0d1af3601693070f723e0deaea1ee28734bde5ef4895843a1ba59cd7ceaa5", "enabled": 1 } }, "acct": { "100": { "checksum": "sha256:9cde24137141f8c2a8fcee6dd15537f0f54198da14360a30655a0606e4ba2818", "enabled": 1 } }, "afs": { "100": { "checksum": "sha256:93c409c37386aacafd2d288f053c13e2a092696358aa991db322507eb9f4838b", "enabled": 1 } }, "afterburn": { "100": { "checksum": "sha256:8c0ef816b475cee987383fac53e50be5350b237033c893df8e267579e35eb726", "enabled": 1 } }, "aiccu": { "100": { "checksum": "sha256:1433f430bc89a874d46961861e1da8520158d6794a0b778e86b88fe13f71798a", "enabled": 1 } }, "aide": { "100": { "checksum": "sha256:35a074f33aee562b559a5cb76637dc4602a70c70f573ec4efe3a4274cb3e3a75", "enabled": 1 } }, "ajaxterm": { "100": { "checksum": "sha256:a2afd368cdffc8ec7c2dccd978bda04ec9bcec8cc1d133521e526dbe196c5f90", "enabled": 1 } }, "alsa": { "100": { "checksum": "sha256:be7eaf8e5da7b69d7d41422771e7e3692d41148f3e3c4e78b7b03d8117ddb2c6", "enabled": 1 } }, "amanda": { "100": { "checksum": "sha256:84287ea4da0ed7b94b142d7b494666232e7a25209471ff689c7806866ce93f92", "enabled": 1 } }, "amtu": { "100": { "checksum": "sha256:ee038524634638e4842fd478cf60861765476498940c7516ced038c6a9161282", "enabled": 1 } }, "anaconda": { "100": { "checksum": "sha256:4292aa3daa6a027612e80073403b822d55297b812dd9455e2842d8a89a563e25", "enabled": 1 } }, "antivirus": { "100": { "checksum": "sha256:75e3f196fca58dc489cf92c4fa8be3decec0bcedcb2008cb7a83df5bc00d5850", "enabled": 1 } }, "apache": { "100": { "checksum": "sha256:60e843a42569cd6efeb7da329e9593960e752c111dfa1df6874fe22f26d0a3a1", "enabled": 1 } }, "apcupsd": { "100": { "checksum": "sha256:a00445c04d0003fce006ff5529e988671c09286372f803b2545bebc5302b71f4", "enabled": 1 } }, "apm": { "100": { "checksum": "sha256:083e097b9b3ac106fb9e9b0fc7b5a830b70a85a321ee913e0a3ce5a262d13779", "enabled": 1 } }, "application": { "100": { "checksum":
"sha256:46d9417dcf9bb31c077dc7ad25d8ac315fed23a2f0e69b00460c449534932765", "enabled": 1 } }, "arpwatch": { "100": { "checksum": "sha256:286406a4369a79a0e868d0e73699ee829431f534733e527bf91b6363486b00ed", "enabled": 1 } }, "asterisk": { "100": { "checksum": "sha256:3c093f028ebb0a01031a7a03233c24fb3ba41c693d3246b3275337289b20251c", "enabled": 1 } }, "auditadm": { "100": { "checksum": "sha256:87a032f064920c8036648d1fde7f75dc96f7ea9f9e514b32a24a91acce93e27c", "enabled": 1 } }, "authconfig": { "100": { "checksum": "sha256:be2114af9cfa429328d94fd44aeb34a5f94a6dab76deb20f3b9cea7182dd1343", "enabled": 1 } }, "authlogin": { "100": { "checksum": "sha256:61b6b0c427f5c86b16f77231ce3aa989c8ef7b40bcefef99d8f7518d82bd8bc9", "enabled": 1 } }, "automount": { "100": { "checksum": "sha256:7c1bc02c72f6a9b86f6cb6b4e8307428346706171510fb17f1e09bf1c7600d35", "enabled": 1 } }, "avahi": { "100": { "checksum": "sha256:ce150ead441c9ccbe062a9f7b868ff893eb427ff6a7bee500977699c42b89286", "enabled": 1 } }, "awstats": { "100": { "checksum": "sha256:70fa46fcee1c327fbb59d24b26bf92aeb641705bb6821d29ee9a8c213b5822b0", "enabled": 1 } }, "bacula": { "100": { "checksum": "sha256:7ff04b28e1684fa4f655051692015501aa0f0ad46edd79cadcdf7020fa2e66aa", "enabled": 1 } }, "base": { "100": { "checksum": "sha256:304a21cb0d4fa72d825fc9137d7d0bc3133448f952d6bedc2753bc721dc5fac0", "enabled": 1 } }, "bcfg2": { "100": { "checksum": "sha256:a38aef4d9df7de368ce8f26c374fdd7c2d6c7f3a12b07c706265f35b40de7308", "enabled": 1 } }, "bind": { "100": { "checksum": "sha256:03544e87d5e1caca5b6b69fb627b482f44bfa63f49cf5a08245ff08fb9982809", "enabled": 1 } }, "bitlbee": { "100": { "checksum": "sha256:eb7f9de13d74d31a934e7c58cf1a34b428d5695ad1f552d43af73ddfdf15f3e3", "enabled": 1 } }, "blkmapd": { "100": { "checksum": "sha256:b75ceb3e1f47bed84573b3d82d4dbe9023adb0a9a40fc2ddba393addd5f9a1d8", "enabled": 1 } }, "blueman": { "100": { "checksum": "sha256:a2645f50db0b15fac85915686e84d9e1ad347d72d77790f67f9e630d73ecaa13", "enabled": 1 } }, "bluetooth": { "100": { "checksum": "sha256:370a05f9a92e9a3359ed79b853c1719b594592cd5541c56023a92bbc87764270", "enabled": 1 } }, "boinc": { "100": { "checksum": "sha256:354fd26e2ef0e4c35edb3aa0bb068ee269f9d49f8180e0ebca24d06039aa8e0c", "enabled": 1 } }, "boltd": { "100": { "checksum": "sha256:39592577c657fe9b8720c7c8fc0a382b56377cc6c8eff3d3068d276552467e4f", "enabled": 1 } }, "boothd": { "100": { "checksum": "sha256:9f65fcbe298e95e21bc82bd03776022a45823c99abaf4e684168248f43672519", "enabled": 1 } }, "bootloader": { "100": { "checksum": "sha256:21a4ec8c7068672a02d49a4ca8721a7d3e661d1c704908e04b7abc9c955e2343", "enabled": 1 } }, "bootupd": { "100": { "checksum": "sha256:c707603b2556dd53524e160e5c66150c18ffc0b4668b2404622282cd2925ddc7", "enabled": 1 } }, "brctl": { "100": { "checksum": "sha256:36617fd6db4ebf3850675aeea3e096fa59a4bfdcb64613f93f8651b6f61db3cb", "enabled": 1 } }, "brltty": { "100": { "checksum": "sha256:5c24db4b697cf2406a8c713240ffb78d953ad090e6287b842f86dffe1290ec26", "enabled": 1 } }, "bugzilla": { "100": { "checksum": "sha256:496bef2bede30915da26b4136fb2f34becdcbc87925e0d44564667901b75d658", "enabled": 1 } }, "bumblebee": { "100": { "checksum": "sha256:0e7317ff234ccbf01bac4d63543ed7babffd63081d4f64100c2bc5f35c3d288e", "enabled": 1 } }, "cachefilesd": { "100": { "checksum": "sha256:3246918462bd57ae9d412e4177a59735624f031c7c5bd79254d0e4ab8c799045", "enabled": 1 } }, "calamaris": { "100": { "checksum": "sha256:22eee16dbfbf19e6c7e3e8a22ecf7794a4667b9c0fe38de1b7892939e5383a9a", "enabled": 1 } }, "callweaver": { "100": { 
"checksum": "sha256:db38c31ae173d049f813fdcc8019651b1ea662ec270fa27943d851bb4e6fe951", "enabled": 1 } }, "canna": { "100": { "checksum": "sha256:e752c21e4c76ead1a95b3858a36df5c9fe1dcfc2fa72b6e3337db501b255ed14", "enabled": 1 } }, "ccs": { "100": { "checksum": "sha256:2ed6f511d59167652141d928236900e08ac58f0347d5b13e384282f0c9d4bd7c", "enabled": 1 } }, "cdrecord": { "100": { "checksum": "sha256:eb4506711c4aa4449d76ceff06bd4728a67b150fa6daa449329af99d55a7e05f", "enabled": 1 } }, "certmaster": { "100": { "checksum": "sha256:f70165cc07c9c0edbada60c282cfd7b059e1c4dad3fd296c77238bfa7db74519", "enabled": 1 } }, "certmonger": { "100": { "checksum": "sha256:0ad5ac88aac27c3f65958235d1f71ae11ffdbbf0e30f2cd4a45fe0438f8b80c2", "enabled": 1 } }, "certwatch": { "100": { "checksum": "sha256:85e1d3dec4e26d58633e3a66dea23193fee975435de0432a33d9e3f0003148e3", "enabled": 1 } }, "cfengine": { "100": { "checksum": "sha256:0538edf6792efb191ae48f6f7ea825e7a0300b3f5941588a231e2e0ed84c3ef4", "enabled": 1 } }, "cgroup": { "100": { "checksum": "sha256:8a4ca1608501b748500da1cca0611feca68f892b6a0b931eff8391eff2880b83", "enabled": 1 } }, "chrome": { "100": { "checksum": "sha256:d778deea4a45cf38804a67181906680a20b1f94f779096b4b291658a3f4f7797", "enabled": 1 } }, "chronyd": { "100": { "checksum": "sha256:4472d378d0cca0cac8ee119b40beac9a1528f7e8afe3835fc868d9ee50e857ef", "enabled": 1 } }, "cinder": { "100": { "checksum": "sha256:65a156504f35a84298492048f07a967c74cca37467b5519175f0132356a4f3c0", "enabled": 1 } }, "cipe": { "100": { "checksum": "sha256:9aea31e33e0bbb1343acc95c4b96034c0e21cfc8098a6d9239c6fb2ddd964edd", "enabled": 1 } }, "clock": { "100": { "checksum": "sha256:5e2443a8b037a9f47d1082255f61bb3bc94ea76727732bec8ca477ed0f3d1cef", "enabled": 1 } }, "clogd": { "100": { "checksum": "sha256:ffb0c7d931ae1d6bbd575684ae14dbcc542b1a589a5c70235143b6494dbce74e", "enabled": 1 } }, "cloudform": { "100": { "checksum": "sha256:c5a2c5234b842e5d65fe5aa739e2d30ae18b7a4398fd02ec03ffb5db8b0022f5", "enabled": 1 } }, "cmirrord": { "100": { "checksum": "sha256:e55c52b2d9cca85a56a01f9a862ff089a008deb2e23d8dc36bf4db067f9d63d2", "enabled": 1 } }, "cobbler": { "100": { "checksum": "sha256:abdf9b12e79823ecdc818a9aaec4d6e4253f02e8c59b5c320677ae99096b8c8d", "enabled": 1 } }, "collectd": { "100": { "checksum": "sha256:fd6d1f0d8446afdd95c5c88850751a9def7fb10d8efef9113f7808431296374b", "enabled": 1 } }, "colord": { "100": { "checksum": "sha256:087d0f4a10495e4186632d167181b302e35abaaa4aee055628c907e4feb5e60f", "enabled": 1 } }, "comsat": { "100": { "checksum": "sha256:c7d8f1c628aba8babecb74c405389e5508768fce1c19a1a3ffb1f7e2cf21a9bc", "enabled": 1 } }, "condor": { "100": { "checksum": "sha256:a64ce927d71bdb89976bb02ee81c1c0cd362ce79760d529864deb95d78435a81", "enabled": 1 } }, "conman": { "100": { "checksum": "sha256:8610bf4d3c1c1fe1f037c0149f5126ed724947671d3ce2f43ce03318e5bbd6e9", "enabled": 1 } }, "conntrackd": { "100": { "checksum": "sha256:38e74734b46d764bdf548bae5eb0322cb3efab0139b2c57c6293ad4270b2cd72", "enabled": 1 } }, "consolekit": { "100": { "checksum": "sha256:08fd88b15eadd335fb6909b0e19ccfc7fce465a0e1adf66ea23375a528c02a2b", "enabled": 1 } }, "coreos_installer": { "100": { "checksum": "sha256:7aa6f4a2c4b409fb85dbc8e9e1b44a3166437ee81341247655d783b4fc8a3538", "enabled": 1 } }, "couchdb": { "100": { "checksum": "sha256:c2b8357f1cbba4813f68e96b8683bf56117ea18a08f509c47a9007d6dd7e37d0", "enabled": 1 } }, "courier": { "100": { "checksum": "sha256:bee8013d915739bd1e9a8ccb7d6936aa2d05ed68358b0c5624f9b7b20327a489", "enabled": 1 } }, 
"cpucontrol": { "100": { "checksum": "sha256:c62f78f9a781fdb8c455b4e1c444206466a7a572e499360a40ea752d8eebb332", "enabled": 1 } }, "cpufreqselector": { "100": { "checksum": "sha256:2155538ddced6cc2efbc8c54879c34b385aa1407ea62157644f787ea998de988", "enabled": 1 } }, "cpuplug": { "100": { "checksum": "sha256:6287b50c36817f7293217a030803bf3d62c707699a0e529c7d067989248ddcf8", "enabled": 1 } }, "cron": { "100": { "checksum": "sha256:5b654bad71770454ff600a55533f4da984770414d0cb1541e5d6035b73e90544", "enabled": 1 } }, "ctdb": { "100": { "checksum": "sha256:867a24cfaf2c6935e7c2f0f85e3f9b47de8126ad509db5f7aecdf572492a982c", "enabled": 1 } }, "cups": { "100": { "checksum": "sha256:acfc1b83372629d838cd2e1a291d1e88ad352fb5577eee01c1bcf460d8444883", "enabled": 1 } }, "cvs": { "100": { "checksum": "sha256:b7034b37cb6a943ec0e0b1122fb168440623b525b9d9a871c8f0f80f01427b0e", "enabled": 1 } }, "cyphesis": { "100": { "checksum": "sha256:6e83813cb5e03c15e648a7e798aae3e9c94f9a54ad542e971f176ab9271bc24e", "enabled": 1 } }, "cyrus": { "100": { "checksum": "sha256:d1da2a6beba9e0cd0bc492e8d8cafc7a9d3f1b13342967badb49fba668f6a985", "enabled": 1 } }, "daemontools": { "100": { "checksum": "sha256:1c94c8a99b9623387df88f2693f049682b4d4fc392904afab5bc9ba7a9ccb66a", "enabled": 1 } }, "dbadm": { "100": { "checksum": "sha256:0e9b5b78865e451e40a8f55b406801e55e98d4812c95f37130874438831a9145", "enabled": 1 } }, "dbskk": { "100": { "checksum": "sha256:6d9c565c834b41796ed712b843c7bd664fffafb667765c8d857432498388f9ff", "enabled": 1 } }, "dbus": { "100": { "checksum": "sha256:49b7477e868249bf17c6b5467b705d12e9ddc87bf2060898e7529a646a0f7e34", "enabled": 1 } }, "dcc": { "100": { "checksum": "sha256:e223c213d51685baf4d3be0c5c4805482c6954bd890d803062ddba896654b483", "enabled": 1 } }, "ddclient": { "100": { "checksum": "sha256:51d8f84dc3a8acb26b725e61aed99a1fdf79ea9dbc63fc7200a8d4697ff4c9c1", "enabled": 1 } }, "denyhosts": { "100": { "checksum": "sha256:723f8c7cb84bd0f48a15663d09d67eb67be7f89b275c9ab2966c88ca8d8ac18a", "enabled": 1 } }, "devicekit": { "100": { "checksum": "sha256:9a380e1b546cf2d8eab55a099515bccd1b25d2dc0d0ba28666e7b95f9d665989", "enabled": 1 } }, "dhcp": { "100": { "checksum": "sha256:1735881cc778e961bd742e846686425ec4014676e507460c85158f83ffc131ad", "enabled": 1 } }, "dictd": { "100": { "checksum": "sha256:a4988c85c75b5fd1b4a38b6335bc3fb962c0e78693042fc35785ce68ad5cb371", "enabled": 1 } }, "dirsrv": { "100": { "checksum": "sha256:43becf8b756ebd6e31e51c71f8963de116feaff35ddc0d6f15c4cf9da79b9c73", "enabled": 1 } }, "dirsrv-admin": { "100": { "checksum": "sha256:4c8ca106fff85e11e0a1829ed2ac3f243e16e91d7b782b98ef8999501bf0ba95", "enabled": 1 } }, "dmesg": { "100": { "checksum": "sha256:0dc55ec34569ba57e832be6e33834acf3055e707f8a04e644a517fe6c25fbe0d", "enabled": 1 } }, "dmidecode": { "100": { "checksum": "sha256:9a4b3d93e6fe7a710d739a682991df8e4fb3bfa5c4408f80b7bcf8bded0b3f91", "enabled": 1 } }, "dnsmasq": { "100": { "checksum": "sha256:c835e6157080af15bad60e3175ec73411ecc09c79b1f24488d38dbb43e49c524", "enabled": 1 } }, "dnssec": { "100": { "checksum": "sha256:77d3836aae67432fe5aaad6cf960c57c4c540253b884d0da7ce24f527f480bbb", "enabled": 1 } }, "dovecot": { "100": { "checksum": "sha256:dba3ca8b279efbe4c9d41b20c94f4eaaf69b63234781b6bffc7a0186e761d972", "enabled": 1 } }, "drbd": { "100": { "checksum": "sha256:b2a6e422462c98deab9be1ebbd7b4c0e541652cef6544eb817890b59691f6f63", "enabled": 1 } }, "dspam": { "100": { "checksum": "sha256:a0e4a3f431978461ff0c6606dece39299123c11c0c50dd07dec0523b087e6d69", "enabled": 1 } }, 
"entropyd": { "100": { "checksum": "sha256:ae9d99bc78f2f39ed963de93eacb8d15e06f9695542787fd88dd34ae5c417030", "enabled": 1 } }, "exim": { "100": { "checksum": "sha256:df1cfbb1ab78dbc0de7189c60173c3e0fa87640761050a6ee1915ad7b268f937", "enabled": 1 } }, "fail2ban": { "100": { "checksum": "sha256:9050e1e27a67f06989fd7b1ae77b9c0086f4017aa6f4b3123e4d40b4c4be24ef", "enabled": 1 } }, "fcoe": { "100": { "checksum": "sha256:de1005aba353d2cd82e7d2542b9f0f22a537c0836598dc0d7b3fc739b0a1512d", "enabled": 1 } }, "fdo": { "100": { "checksum": "sha256:0040a417e0edd66d8c3ebbc52a7b9a61a114724ef1b47f41e7d0e0dd9f496667", "enabled": 1 } }, "fetchmail": { "100": { "checksum": "sha256:052196a9650b28088637ad2c1bc2e3e18c9ebb26b9d81fc22d06f9383448e082", "enabled": 1 } }, "finger": { "100": { "checksum": "sha256:7e077f2f9cbb7a67e901983f068e542f2ea7bf8fbd32398624006f2ec3721469", "enabled": 1 } }, "firewalld": { "100": { "checksum": "sha256:074ef28f63316a886aa80247be41c3f4eb0c4ab24be2538204a8473c206377dd", "enabled": 1 } }, "firewallgui": { "100": { "checksum": "sha256:9123c9d8a6246155165e536233f76373dce5ed442b693d3adfad2cfb0d045d14", "enabled": 1 } }, "firstboot": { "100": { "checksum": "sha256:bd1c559e70c0fa8a6dd0e9e541410e8def49a3f8769e609a7371bcae87cbc7a1", "enabled": 1 } }, "fprintd": { "100": { "checksum": "sha256:651a0158d860694e2f7e3c2ff5842cb1167edd212f207d25fbd048cc0bca8b1e", "enabled": 1 } }, "freeipmi": { "100": { "checksum": "sha256:b0055e597efbe80253d626f80a865cb814f393fa91e66afd4458d436fa896318", "enabled": 1 } }, "freqset": { "100": { "checksum": "sha256:e8a220c3eef785816671acd81eb445e324d475a084fbd06263b797d578d02f3c", "enabled": 1 } }, "fstools": { "100": { "checksum": "sha256:127cb7df805e3a46359a5207b063f90c1d19e6d3198182767ed70779b4b28221", "enabled": 1 } }, "ftp": { "100": { "checksum": "sha256:9902176e4edcbecebd0f6ac300b28794668de3d4540a9ae5be717f396b0292be", "enabled": 1 } }, "fwupd": { "100": { "checksum": "sha256:c052a6835e70d9c0e051e979e81764ebb89f6f133a440db25d1dde163ffa48c4", "enabled": 1 } }, "games": { "100": { "checksum": "sha256:45772c12ebf653bb9a623771101b99ab2ffe914d03370ebfbe3b0912ca0d6b8a", "enabled": 1 } }, "gdomap": { "100": { "checksum": "sha256:1e46eff6833500ac418eda8676b641bb33eeeaec5ade25dc6d3ab091f736cad1", "enabled": 1 } }, "geoclue": { "100": { "checksum": "sha256:708228486a23ad80ffd7d6a05a65a7b655f1dbc738ef8479e473d60e4cc6d3f7", "enabled": 1 } }, "getty": { "100": { "checksum": "sha256:32e35dc7eeec4247db73e826bcd13b91f8370e8544b223547f4354f42b8937ab", "enabled": 1 } }, "git": { "100": { "checksum": "sha256:d7d5bebe879118666f1417ffae9f0772e0889406dcc768c4c3b394ec7cc2a231", "enabled": 1 } }, "gitosis": { "100": { "checksum": "sha256:b3e2375f3c95924b4daa45ecd4951af233195f655f5cab28298b21782e7df2f0", "enabled": 1 } }, "glance": { "100": { "checksum": "sha256:9414bf98dd0ad365e8c880b881286f51845c6a14bb7fc1022770cbf78c33152c", "enabled": 1 } }, "gnome": { "100": { "checksum": "sha256:86efbd64d547d08ff38c0ef19d92899032b5c7dd0972e1831dc8b58211e46d91", "enabled": 1 } }, "gpg": { "100": { "checksum": "sha256:b547a37d1a7e474dd3e085d36e4ca7276ccd68bf3ecf2e09fe7a846030a80020", "enabled": 1 } }, "gpm": { "100": { "checksum": "sha256:fc6c94c251896fa97e9298a902669eba5bb2179ad7ace408d5e6bc05973174e0", "enabled": 1 } }, "gpsd": { "100": { "checksum": "sha256:19c083472a464ada6846b9173292a9d72a06a4e5e778d69783bd51ecc1553eb0", "enabled": 1 } }, "gssproxy": { "100": { "checksum": "sha256:3c8410c8c59ae99a170ad00ee7ad66d186d3e83d66ad3c2300333abdaa7fb21c", "enabled": 1 } }, "guest": { "100": { 
"checksum": "sha256:eea7130d539ac61485efb94b90bbb6af7bf5dee0957c37e3ebee3a8da4797deb", "enabled": 1 } }, "hddtemp": { "100": { "checksum": "sha256:9722c02beeb44ba92062700b4c043eec7a1d39a7eb91a1289edea7a928129e0f", "enabled": 1 } }, "hostapd": { "100": { "checksum": "sha256:1d57e77e17f124ee365365513274cf402353add167245a839f89126f071bfbfb", "enabled": 1 } }, "hostname": { "100": { "checksum": "sha256:9b8c53c2622515f672ffc22c9f18a6db6fc2566e91dbdfea83faf54a6cd7c8ed", "enabled": 1 } }, "hsqldb": { "100": { "checksum": "sha256:2ab3e4455cd6580a63f4026c6b1b133e658bbca0de0d8d6c74a67ab0c3c866ad", "enabled": 1 } }, "hwloc": { "100": { "checksum": "sha256:cf362b5c9bd397a8d04ff430a173c5132b52a5fa0ca5c7ac1dd6d41920259ead", "enabled": 1 } }, "hypervkvp": { "100": { "checksum": "sha256:896fcf1247e335cbd9b09d4e0e15363dda32d2d0cede4c444416198af2330362", "enabled": 1 } }, "ibacm": { "100": { "checksum": "sha256:401eb3be55a99ce20f4514c94004d559a5dbb956979dc810c7288ed52ce20bef", "enabled": 1 } }, "ica": { "100": { "checksum": "sha256:40af9f330d0e9ae06e87e9501e571ad8dec8827a42fd207b91483ad64f73dab2", "enabled": 1 } }, "icecast": { "100": { "checksum": "sha256:c475483816608ce48e61163a5d661b65c9a4ddaf119607c204d94ec220c74f92", "enabled": 1 } }, "inetd": { "100": { "checksum": "sha256:79c55156850062f9d931ff310184927ffb8fad23533e0a81e8603c0eeb39473d", "enabled": 1 } }, "init": { "100": { "checksum": "sha256:9d8a5e4eedfeb46631070df5c3aeb0abcbd44523ec5bad52dc3942254081c02a", "enabled": 1 } }, "inn": { "100": { "checksum": "sha256:7d215013470faf3ba6da02e9b0eadd84100e27d3811d5239652e29a834dee4c9", "enabled": 1 } }, "insights_client": { "100": { "checksum": "sha256:9a25ddb951183ffbc71b83ab24c259f1572d9d47278862371f7d2c2b67eff05d", "enabled": 1 } }, "iodine": { "100": { "checksum": "sha256:f7f9445c1945584a868329ec91cdf3812e2f1a19cf4ae8145a97ab5f60e9b728", "enabled": 1 } }, "iotop": { "100": { "checksum": "sha256:4db5d6a9195336b52e11d78acc306a36d4d9b93a4bf9931f7ce42197a55f9619", "enabled": 1 } }, "ipmievd": { "100": { "checksum": "sha256:ae94ecc6603e112053d82c4b034e371fa0ae3ea6227d22730a79c2d88f7f518c", "enabled": 1 } }, "ipsec": { "100": { "checksum": "sha256:dea36801533eb8484f81e791e8e9bafbe2ee01a0a60cfabd45fcc99d768c958a", "enabled": 1 } }, "iptables": { "100": { "checksum": "sha256:158f46e1903a9e4b59492cd3b6d002226ba99215a92f8ab7254cc201adfc6d41", "enabled": 1 } }, "irc": { "100": { "checksum": "sha256:8137b7bf2df668299a1f07a7357891c9b532623b8a4a647938502f0115a5719d", "enabled": 1 } }, "irqbalance": { "100": { "checksum": "sha256:edd68cf00488897a118c97d42b68b5ebc42eade076d435668de403386055768a", "enabled": 1 } }, "iscsi": { "100": { "checksum": "sha256:c6a126a43e805c50b75ce428c6d06f2098aa3832c4c2b776c27de47db763a973", "enabled": 1 } }, "isns": { "100": { "checksum": "sha256:730425a2f8fcf7def5a5d3cd7e2fe86c4798f48ed990f01b6c4f61c2c1af4729", "enabled": 1 } }, "jabber": { "100": { "checksum": "sha256:954964e3390965fb3bd16d9e0f04e5c1733b1d52d0f9aeb86c15097128847e98", "enabled": 1 } }, "jetty": { "100": { "checksum": "sha256:cd35fa8929bafd81093cfc39e523c8fe55b1f3ebfe105630920d9aa1f50d27a0", "enabled": 1 } }, "jockey": { "100": { "checksum": "sha256:60808a39b8af95362a9e430e000fe157e610f06845766c1bf84567986773c3a7", "enabled": 1 } }, "journalctl": { "100": { "checksum": "sha256:37b991b37d592bae92deb5719d208e9272492cc81358a603aeac66012da73303", "enabled": 1 } }, "kdump": { "100": { "checksum": "sha256:8df5cf83cd544674505896c1aa2d5bbc3a63bfec5bd23082efb6d3e87fb1373f", "enabled": 1 } }, "kdumpgui": { "100": { "checksum": 
"sha256:1037dc7bcf3027e597f682ebaed125ffe524999e5ed9e5e59ba4d2d96dd56928", "enabled": 1 } }, "keepalived": { "100": { "checksum": "sha256:8ea474a204f637775dfaf134e51c27da197f647c4c01121c398c7135d17ae93a", "enabled": 1 } }, "kerberos": { "100": { "checksum": "sha256:7191052f585d5fafbac635931a6731644f0bd083abc2af3de0f9cf8a09dfa012", "enabled": 1 } }, "keyboardd": { "100": { "checksum": "sha256:f0d2c1e478cf050cc9a4975c3e477c7ace50c8ec4f24e6378c3bf9f5375ac47c", "enabled": 1 } }, "keystone": { "100": { "checksum": "sha256:fdb4d581281615682a3d84cb0d172eb400d4e421e05fa6eb53e935c998eb66c1", "enabled": 1 } }, "kismet": { "100": { "checksum": "sha256:3ba626d0e10d52e23eb25ed2bcfb3333d10724cc37b811d191e2377b0a50a32c", "enabled": 1 } }, "kmscon": { "100": { "checksum": "sha256:c362a617fac2d877d61251310ac60e2dd1f914746224fb481fc5877ac4c9e615", "enabled": 1 } }, "kpatch": { "100": { "checksum": "sha256:ea52717eb9f8414bf6a91da0e0dcdf8911d0dbdc6ef24636e3d55364f9d74a48", "enabled": 1 } }, "ksmtuned": { "100": { "checksum": "sha256:3792d937dae3c0c5020fcd3d231635e0e3bce9855f5182f4a78596b402b1e01e", "enabled": 1 } }, "ktalk": { "100": { "checksum": "sha256:c341246894ef6ac35ff57578dad797e3cab4576289ed54fe79a8f520d5f97586", "enabled": 1 } }, "l2tp": { "100": { "checksum": "sha256:0e48d9b7b7fa1119f136c8069d0dc8b1411c4fab98855647ca97a58e20f49771", "enabled": 1 } }, "ldap": { "100": { "checksum": "sha256:bfe184a21cf22e874bf9c4adf17d92ab37f78b212bac0a1e4205605666a72c5e", "enabled": 1 } }, "libraries": { "100": { "checksum": "sha256:2e54d7f7a9bfb8313eb16163e91dbc59ebe37e99c5d1185a1e94301b026ce971", "enabled": 1 } }, "likewise": { "100": { "checksum": "sha256:076102b64c364619c722ec50ff1bc6711583a48a3e4d628b3d5b702664ded6db", "enabled": 1 } }, "linuxptp": { "100": { "checksum": "sha256:b86785f06b028f272cb8381750cfaaa3cf73aeede9d48227f051e6fc189b8dbd", "enabled": 1 } }, "lircd": { "100": { "checksum": "sha256:be1fcc3f6423021d5dfff876c22329b76e2a8a3408277643cf19b387d3af18df", "enabled": 1 } }, "livecd": { "100": { "checksum": "sha256:ebc240faa5377ca5d45a084da15424d873958197df22f16e7781f67da72c02da", "enabled": 1 } }, "lldpad": { "100": { "checksum": "sha256:7795d0bcc893b7d1a20b56969633604286a44c02b0b1e69f3e8e6cd7006d3e59", "enabled": 1 } }, "loadkeys": { "100": { "checksum": "sha256:de8b3dab704fe78e803c012052bf2890d7e87b8b76d8fdfbf613d6d697f01c9d", "enabled": 1 } }, "locallogin": { "100": { "checksum": "sha256:952fe72cafbed51e96e7f051d9523c1ca3ef665b28c5b0f3c0d11d521258daac", "enabled": 1 } }, "lockdev": { "100": { "checksum": "sha256:31cb1e12fe7d8fbd64fe9e9913a00ac3aaebba1aa074abf1ab9bf76e101f7d87", "enabled": 1 } }, "logadm": { "100": { "checksum": "sha256:bc3d6d6cdcb3b2dac1131f16f15bed74c8b1fa37a353da2793cde2061ffdc6b4", "enabled": 1 } }, "logging": { "100": { "checksum": "sha256:78e8c00d69c84ea399c88137b1c5276084c98a468eb4df58c13894c8c569cd18", "enabled": 1 } }, "logrotate": { "100": { "checksum": "sha256:d9cbeec25733f9393c0967a8f9726bd7cd1d070c3b86c0c0d82379601b99b3dd", "enabled": 1 } }, "logwatch": { "100": { "checksum": "sha256:4195de7172d5d5d1bde97be084e23e80032e07b4f2330ac5620759d4910a4da5", "enabled": 1 } }, "lpd": { "100": { "checksum": "sha256:cf8fb1e0de66b91a3d35dd0b5a5f93a69937c1be4a8103d10e6edb70d17a4830", "enabled": 1 } }, "lsm": { "100": { "checksum": "sha256:e7a424ee7f32c812faea57710e6766f23963ec2e5b2a38486a6c024d160f9c23", "enabled": 1 } }, "lttng-tools": { "100": { "checksum": "sha256:ccb6b5c8378542594d25426623373c4de49f01e37b8fd0f2bed8d7c4f83216b9", "enabled": 1 } }, "lvm": { "100": { "checksum": 
"sha256:19d491afd9f4b258e4ec73ddbb3243feafc28db5f0c836784f8fa29b2146d215", "enabled": 1 } }, "mailman": { "100": { "checksum": "sha256:e121209046f2487ef64048e7ae1408da84d9c6ee6a88d6639cef3b6c4b2be19a", "enabled": 1 } }, "mailscanner": { "100": { "checksum": "sha256:528e84d30728ad73783b1cf6992be9cc1bc14f77f4d5e0ce6ca6ea2d759f2061", "enabled": 1 } }, "man2html": { "100": { "checksum": "sha256:e3a13e4c9a9f651ba32221ebaa37b8a2f7d15f7480622a755f8301022dcc8f15", "enabled": 1 } }, "mandb": { "100": { "checksum": "sha256:2c098f0612bbd9d9ee9db00a817c51726da69e7536d687adf74be7d4df7911f8", "enabled": 1 } }, "mcelog": { "100": { "checksum": "sha256:48343f6df53f591eff2c6a76bfbf12f351daa9e382785fb47e1017cd7badab91", "enabled": 1 } }, "mediawiki": { "100": { "checksum": "sha256:a901c614ce730a6943df15300120f9c0dab9fa89f234c0301f4a995f5a1a60cb", "enabled": 1 } }, "memcached": { "100": { "checksum": "sha256:03335203d0a113eead2d95a159df467fc164d12cc1c9ce4b58149da7b80d7943", "enabled": 1 } }, "milter": { "100": { "checksum": "sha256:bfbe9652c48ed1b2abdba90720f2abff11a3c31a72f5b3c56e8eac168542072f", "enabled": 1 } }, "minidlna": { "100": { "checksum": "sha256:d61743f3489bbc08417d6dbc894be0f19b50bb6e76bdb0b9a344a5b29a565a91", "enabled": 1 } }, "minissdpd": { "100": { "checksum": "sha256:ab3a5b4c6d53cd2d6d1bb1e32d587bd65219f22d8f94b58d2f9948fcc6d4bfa5", "enabled": 1 } }, "mip6d": { "100": { "checksum": "sha256:56a370111ea7709b149e4a0fbdb1ac1b123427831161d3f6170efa64a18aeb5e", "enabled": 1 } }, "mirrormanager": { "100": { "checksum": "sha256:37cda21ca34e7585b6fb861d91cafeb146ca75d0a7878bbc06ab24eabe6706c3", "enabled": 1 } }, "miscfiles": { "100": { "checksum": "sha256:f04df10eaf97dd9eb0520f7c208e6002d4c695acfb2ce58e52fd8b689c587226", "enabled": 1 } }, "mock": { "100": { "checksum": "sha256:6bf94b1043da99327b1f68a10215d963bdd7b0a7f7c3f74c801d1a8db22542d7", "enabled": 1 } }, "modemmanager": { "100": { "checksum": "sha256:412ba79b5e6a4132630f2b8da80b9e66ff5992e81ebcc206ec2f90c67ccf4ee5", "enabled": 1 } }, "modutils": { "100": { "checksum": "sha256:ca7b0cc6f9025c22895a9771ae1f761e265f05bd9ea4fc4f5d0fa6ddf409f648", "enabled": 1 } }, "mojomojo": { "100": { "checksum": "sha256:665b33a4d2e32a1a3b08ebaca792c7d1093782e3f885d048c5c2be57cea07d1e", "enabled": 1 } }, "mon_statd": { "100": { "checksum": "sha256:17b96152a9ff5a04a7cd3514903bca98e78369bc5791e7bb88aab6dcc47e3b7d", "enabled": 1 } }, "mongodb": { "100": { "checksum": "sha256:acd421e39f4c53a011a47ef7a271efc7d4f85a97575d03f69e30dedfaa1b14c2", "enabled": 1 } }, "motion": { "100": { "checksum": "sha256:9c2059177a49f2cfddca3629a29929594aec4b9dcd1fa06a80c1119fa687ac1f", "enabled": 1 } }, "mount": { "100": { "checksum": "sha256:0469642c05b99ec3b9f0472e91d161feead7bf4c4a4190cfd54b856ea9b93ea4", "enabled": 1 } }, "mozilla": { "100": { "checksum": "sha256:109d333319ff37383f2e3f6bfa356fb24b7adf3702c51f8badb8a4714c99a430", "enabled": 1 } }, "mpd": { "100": { "checksum": "sha256:4ba142d40036af5be213284b79dd953533bcb4d9846c3b697813002b98107b7a", "enabled": 1 } }, "mplayer": { "100": { "checksum": "sha256:e778408f9ad76e2da9c32482ac1f0c0495f6f552ee39fea95dccc818c70a7798", "enabled": 1 } }, "mptcpd": { "100": { "checksum": "sha256:bf848203e9b4e05ee5da14c2ced4592f7147f2674b296dd0ff76049364cb5d6d", "enabled": 1 } }, "mrtg": { "100": { "checksum": "sha256:c14a0cfca79de2171c617ec3aa77ab2a0358a78678c6711d570fe829e993a1dd", "enabled": 1 } }, "mta": { "100": { "checksum": "sha256:b8a45a6236afbcd2102f71330ffd2598a99531ec55b84be04b210c3cdea0d6dd", "enabled": 1 } }, "munin": { "100": { 
"checksum": "sha256:c0e62e19e20f833e62ad6a5fba025b3fc5d5ada2ea29db094f648dfa72cf713c", "enabled": 1 } }, "mysql": { "100": { "checksum": "sha256:5513598214e4ac4737a0f73a4349d8f786334d62ca92ea0099a91d89f5717103", "enabled": 1 } }, "mythtv": { "100": { "checksum": "sha256:971a0c0ef295e7fa2ec443ae9e0d752bb1acab9928fa0c233995b7e7f3f1aad7", "enabled": 1 } }, "naemon": { "100": { "checksum": "sha256:75db52cc67150da8946fb064fa2508885272c63af0628d48c4a35655eb912b79", "enabled": 1 } }, "nagios": { "100": { "checksum": "sha256:68a8d2f4d8b1ebda8d47cb325bed05299f768c756932cf3bc9c027b32142f234", "enabled": 1 } }, "namespace": { "100": { "checksum": "sha256:11505cafa9be1281e93b45a77229c321ac6bafb99673bc4c22e5326a42efca0c", "enabled": 1 } }, "ncftool": { "100": { "checksum": "sha256:5fd7eb85c1fb665c271665cf5c419d3dbb6305dfa40bfa34e8246cdb1232fce2", "enabled": 1 } }, "netlabel": { "100": { "checksum": "sha256:dec9414d3310d4f06ae940978da1b81fea6cbbd52eade15a5c7277558df3cc7b", "enabled": 1 } }, "netutils": { "100": { "checksum": "sha256:31e40dfd1f5a028f5bc20da7b21ebb5103787122703feaeec8555eb067ce41be", "enabled": 1 } }, "networkmanager": { "100": { "checksum": "sha256:26ed3cfe7224044d84743054fa4c4a4fe11b0dadbae54648d2e3c47b9f5e1b5d", "enabled": 1 } }, "ninfod": { "100": { "checksum": "sha256:9b4707184af17bb045236a2b198dc769a6c37716cb03b1c7b49698620ac0d00b", "enabled": 1 } }, "nis": { "100": { "checksum": "sha256:529d649b899b2609c0555f37e1bffd5d764943134a1a36a44bd3c0e58c42ac9b", "enabled": 1 } }, "nova": { "100": { "checksum": "sha256:8072b8372f9a40e1252ec63a0cec6687eef0f7fdec796831fe7359258fae71d7", "enabled": 1 } }, "nscd": { "100": { "checksum": "sha256:21e4816c7552451bf7003ff77e760c89894101990008582618e0e1d183c8bf4c", "enabled": 1 } }, "nsd": { "100": { "checksum": "sha256:811d6c99554491f38f1f09d4d6ec47a7bedbd438ff4aa0c0a9cf5bcbd635b58b", "enabled": 1 } }, "nslcd": { "100": { "checksum": "sha256:1f31f04eb1d7670d7b20305cc9630bd997a7422e591c90fc43ff11e86ce3033f", "enabled": 1 } }, "ntop": { "100": { "checksum": "sha256:35e90cfdcf607f9adedf10cf3e6230e04d4d9186012285a83d2a0af49babd413", "enabled": 1 } }, "ntp": { "100": { "checksum": "sha256:00a08503da498b8a8e909870a25c9e96095d58532cac58be44050af75b2565fb", "enabled": 1 } }, "numad": { "100": { "checksum": "sha256:a067fc44175cf9c5a7aa2763203f773cfe826dd0426c252d4ab6b2aae38c5875", "enabled": 1 } }, "nut": { "100": { "checksum": "sha256:837a6aa61c338fd1711d508ec7ec1430704e05d3e1447c075ac5790c25cb625d", "enabled": 1 } }, "nvme_stas": { "100": { "checksum": "sha256:6da583b7229d5e0e9044bdb93e0b2c24683b50d7b98ac4b7030f2badfb4a3977", "enabled": 1 } }, "nx": { "100": { "checksum": "sha256:99e37fc91859f012471c0382fb758ebb6276680c1aaa487fbfd5a0bb0fcbd32c", "enabled": 1 } }, "obex": { "100": { "checksum": "sha256:eae97e1b0d9f65da798618786f7a44fa088ba644fe43bd46cd518c0838d3317d", "enabled": 1 } }, "oddjob": { "100": { "checksum": "sha256:bd4443d1334e92e171729074cce48baecb8e4707aad0eb6f25d106886866d325", "enabled": 1 } }, "opafm": { "100": { "checksum": "sha256:9e0ccc324238937c2fb3cc36ecb8210c7691b805f3739b23e1cef95be82bff17", "enabled": 1 } }, "openct": { "100": { "checksum": "sha256:66b940104f2ee7b701d17b5f2b7c5787c4d0d27c8434753cd5ffdc34ad662a3e", "enabled": 1 } }, "opendnssec": { "100": { "checksum": "sha256:f0ac631bf1cab954ad343673dbcf311ce2686f1a90858ea31ef2b06260e2e142", "enabled": 1 } }, "openfortivpn": { "100": { "checksum": "sha256:0f8abc6a81b955b9888753f1b26342f1a4c943bdc0ced8cdcfde51c2cd12e0c6", "enabled": 1 } }, "openhpid": { "100": { "checksum": 
"sha256:b0b40f9da8cbf6f96048d61d33cdedd8c818a8ed3177de37291685089ade8483", "enabled": 1 } }, "openshift": { "100": { "checksum": "sha256:890bfacbe2ba8de8ee35c0d7bb5a8191fdb49819d0d64441bd1d4f442d34adbf", "enabled": 1 } }, "openshift-origin": { "100": { "checksum": "sha256:87d03b717c75c41a100d297e542c47787922e5dd2f01d7b90071263d48687975", "enabled": 1 } }, "opensm": { "100": { "checksum": "sha256:b017922f020abdd60b85a2b5d4743f982e85fca7f783dd32be78311fd5197ba7", "enabled": 1 } }, "openvpn": { "100": { "checksum": "sha256:e165f80516476ffe1b93bdd74ad3a6d69720e0136fc3620f6ec7710dc9765007", "enabled": 1 } }, "openvswitch": { "100": { "checksum": "sha256:e9581e7e22fd035c8e7312f22d04171ffb807e16eb57205413fcca8deac68fc7", "enabled": 1 } }, "openwsman": { "100": { "checksum": "sha256:42155472938e6b25076cda306a0c176db03ae2722597fd4004380b5222589b67", "enabled": 1 } }, "oracleasm": { "100": { "checksum": "sha256:f47fdeba48ebedde1b490b598cb46fd8b30d4e86264f7f3ce68bd2af91409792", "enabled": 1 } }, "osad": { "100": { "checksum": "sha256:ba8c88da0b90ee0eec84f709a7a89bb3b3e458db643317646e8379cb9d403255", "enabled": 1 } }, "pads": { "100": { "checksum": "sha256:6279e391de4f2978d93dd1a88c23aeffb8028bc50d81a0776a7247a011b3898f", "enabled": 1 } }, "passenger": { "100": { "checksum": "sha256:09ef31338f328d05054169704c4cdcb78f291a93fd0d5165fdb33409d1c46018", "enabled": 1 } }, "pcmcia": { "100": { "checksum": "sha256:94fb82e5d82810193cc60f465995348d0fd733501f2691d9cf8058b4bc611078", "enabled": 1 } }, "pcp": { "100": { "checksum": "sha256:c8224a15f7049ea64edc179a5f7b940ffe72c46266cf3bccdf125b1b929e975b", "enabled": 1 } }, "pcscd": { "100": { "checksum": "sha256:b33562b9e8be469abed92ac9cb29e55e58e5d28caf5c5a295486fa1da4035d6b", "enabled": 1 } }, "pdns": { "100": { "checksum": "sha256:e41889c43b795845eb734032b62894802290e804baecf62685e53211ee3997fc", "enabled": 1 } }, "pegasus": { "100": { "checksum": "sha256:e9b396ef7a02cba4482e9f56fde9f1fbfa7e04de4dfd3d80b3523ddb332ffdab", "enabled": 1 } }, "permissivedomains": { "100": { "checksum": "sha256:2453bad4ace526f3cf2c60b358e95a5476692ef25da107b10f52f3af27c056d2", "enabled": 1 } }, "pesign": { "100": { "checksum": "sha256:8b5834f435b3bd76aba49516a21dcc5f45c867c4c1e748543e4c573085c7a15b", "enabled": 1 } }, "pingd": { "100": { "checksum": "sha256:ac0c04cef30f7c01619c07f9e4c2028a7d647cafd46e818e163222bb9f6a98ba", "enabled": 1 } }, "piranha": { "100": { "checksum": "sha256:7518a890684f833f06a9e0db0bc13bc187c3462f83aa0c07848d0fdf8f9d5461", "enabled": 1 } }, "pkcs": { "100": { "checksum": "sha256:2daf9e32ec14aa1b96f49dbc4cdd4afd7d666a87e2ce3acf5c35b32a681fa3e4", "enabled": 1 } }, "pkcs11proxyd": { "100": { "checksum": "sha256:7ab6b9b9691f9a43bb258c657cb2748c10b811530461739b2449a7dcbedc6d5d", "enabled": 1 } }, "pki": { "100": { "checksum": "sha256:949a05604dd067f4bfbe8aefc95565ac5f1b14598713063d245e8f38fbf01a9a", "enabled": 1 } }, "plymouthd": { "100": { "checksum": "sha256:873b2ae3732ee828b2fe956739072318924e333974d09be23d8af18d55150de5", "enabled": 1 } }, "podsleuth": { "100": { "checksum": "sha256:fe135f8a642cd53b19fcbeca60b9eb5e0d2c5cc84f89167e686ae5f9fa42e6ed", "enabled": 1 } }, "policykit": { "100": { "checksum": "sha256:c6dc9c24a34be05b9fecb9dc2566e35a47d7b5d0a70ce3249dda642258374f5f", "enabled": 1 } }, "polipo": { "100": { "checksum": "sha256:0a89a59bbe58e1a5a0d8bb9dab70b6967cda66ce3a110993446d1213a488b631", "enabled": 1 } }, "portmap": { "100": { "checksum": "sha256:15d9f332240b57891a19bd34578401f532242fa4fdae003d872eb1ddb009cf86", "enabled": 1 } }, "portreserve": { 
"100": { "checksum": "sha256:69fec82f8d2a804a8641167815d32835237f878fe7d9d52297f7d4f4e732f3a8", "enabled": 1 } }, "postfix": { "100": { "checksum": "sha256:c4c885b4103c94428b70933dadb8809fa695b3296d474948aac039bd6f019c87", "enabled": 1 } }, "postgresql": { "100": { "checksum": "sha256:193af5fba661c32470026dbf229440236737a59efb53b0fabe2c9aba14c35ccc", "enabled": 1 } }, "postgrey": { "100": { "checksum": "sha256:f3beab7d301e925c9114fc16905d28eb713bc118d215abe2f17a3db1514ff93a", "enabled": 1 } }, "ppp": { "100": { "checksum": "sha256:8673b905d5b897c499e6911d91201e349af2666d906dbe2c1abc39c9f3a54116", "enabled": 1 } }, "prelink": { "100": { "checksum": "sha256:99583ebe5e11399512e284d9d4de0752a1a6832e629953072b9ee94bb3980c8f", "enabled": 1 } }, "prelude": { "100": { "checksum": "sha256:a061be8d0233c31d52544aef63959c56aa5c634818898f465d5717918d654266", "enabled": 1 } }, "privoxy": { "100": { "checksum": "sha256:95e34699603fb38d98bc4491202d783f96ad0d51dd80cf80fac65f45b6fc1a4c", "enabled": 1 } }, "procmail": { "100": { "checksum": "sha256:1fea11fb5b09a5956ca32654374d35ef281093f98cda7d0bc462d1b2a9cfcdd4", "enabled": 1 } }, "prosody": { "100": { "checksum": "sha256:e2b0a84c1151d1f51128b53a7f406701188ef5c8ceb18a733db4f62d58a19b98", "enabled": 1 } }, "psad": { "100": { "checksum": "sha256:ecfa830cf53375b2ea1c0fb0921f5adeb47a4471488765fa43e724d7f5e9a11f", "enabled": 1 } }, "ptchown": { "100": { "checksum": "sha256:870f119b4194e42aff2f71722fb1fb11868f88d3bd2f323eacbdefeea2a9ef4e", "enabled": 1 } }, "publicfile": { "100": { "checksum": "sha256:6056d698ab7914842d62ef8908402e481e02014fbcf03c984df01e768f30abf8", "enabled": 1 } }, "pulseaudio": { "100": { "checksum": "sha256:4a01f517ea0fd510aaac2e918afaef70e40175f2c4744d96bc1fd9647c915e1f", "enabled": 1 } }, "puppet": { "100": { "checksum": "sha256:35ea9020284c9fde1e544bb2b15698ea8b3ae46a3187539542ead64bf563020d", "enabled": 1 } }, "pwauth": { "100": { "checksum": "sha256:ef67d14c742393291981705da797a401a758833e7ab4f3a116cce7b662836761", "enabled": 1 } }, "qatlib": { "100": { "checksum": "sha256:3714e0a36d43f8667e80c187637d847425155bde011be321043371b15098e3c8", "enabled": 1 } }, "qmail": { "100": { "checksum": "sha256:e99893e32bdfbe81a09e2b01a27cf0ea8865e54e3b0fcb1563637a4ed59455b2", "enabled": 1 } }, "qpid": { "100": { "checksum": "sha256:cd5654f248ed789cc12534dac789b9c1d3d32d325dceedb4eb27afa2c6c61780", "enabled": 1 } }, "quantum": { "100": { "checksum": "sha256:aa4c8076bcd3d92db74d5e2394d885e6b10d729b86081f1683e349ac6da41794", "enabled": 1 } }, "quota": { "100": { "checksum": "sha256:f34ce67cab4573756019b1589e0e518c4933ef76887e0437f0ae582f6f703a9b", "enabled": 1 } }, "rabbitmq": { "100": { "checksum": "sha256:a759f5eba8608e6190b1649aeb7122d50de1b985878d9c1d5822bef5bc2b88e8", "enabled": 1 } }, "radius": { "100": { "checksum": "sha256:ad64588fda03fd0d8c6e9b7b4afa31b20472df41ee50b68b8e9f07d6878dcf81", "enabled": 1 } }, "radvd": { "100": { "checksum": "sha256:225787ffe39a022ba6c552cd389e2ddb613353c5ca65bbd572d67ccf7dbdef6b", "enabled": 1 } }, "raid": { "100": { "checksum": "sha256:0da1f5f76dcf060623ca3599040b0c03e5626b2624bd74d3502697ef1e11f387", "enabled": 1 } }, "rasdaemon": { "100": { "checksum": "sha256:bdb6f062284dd7b12282604373958122db4d18b262bfd844520e919ed2845e5a", "enabled": 1 } }, "rdisc": { "100": { "checksum": "sha256:4d86b4c1044e1a8766f6ac6ade0296aa461ef5550efae9aeabc99a5c946936f6", "enabled": 1 } }, "readahead": { "100": { "checksum": "sha256:43044ec71b5839b47dc5fa30b7d9ba500908143b64ad3b608775736a44b046d5", "enabled": 1 } }, "realmd": { "100": { 
"checksum": "sha256:bcad6a7d597f894f1985bf46623a62ac2cbeff634770542c93e40a5fd7be93a9", "enabled": 1 } }, "redis": { "100": { "checksum": "sha256:075a71c98c377420b53c7b584f5d963598d97e7e49f58eb67bf0a5be1b20a908", "enabled": 1 } }, "remotelogin": { "100": { "checksum": "sha256:e6f98b186ddfff611082d29031ae948dd23c737d7ff1d713760d1794906698ae", "enabled": 1 } }, "restraint": { "400": { "checksum": "sha256:95d0d03fbc1d4147f02a0b3da7cd76efbdd75d1f5812cf6160e922336abbf270", "enabled": 1 } }, "rhcd": { "100": { "checksum": "sha256:92774c1cc6fd16156001c4facda140bb33ddba4269198bd016bda6d92eac079e", "enabled": 1 } }, "rhcs": { "100": { "checksum": "sha256:9c65d7909e4c443dc490c9b58a6e6e5471e58b7e934d10f08359db09b5fc395e", "enabled": 1 } }, "rhev": { "100": { "checksum": "sha256:03591f21a98cba8e5f4c5272a799067eca3ae1520a02dd50c13a607a318dfcc1", "enabled": 1 } }, "rhgb": { "100": { "checksum": "sha256:9b0fc4d87d27875c84b7c21c3b99d0af2e52903b611cb360804fe9f50f9d6f7a", "enabled": 1 } }, "rhnsd": { "100": { "checksum": "sha256:271d37f30d3a338cc9bd6199a488d48a7c88068675c462df5071bca8c1f7c438", "enabled": 1 } }, "rhsmcertd": { "100": { "checksum": "sha256:82815996833cc475e386b8e94b87b1516dd876fccd5b2efd4c88ccc4a0854e6d", "enabled": 1 } }, "rhts": { "400": { "checksum": "sha256:3a96393cd8cd9fc1faefc62db7ca107be15963005f4ef420ae2b79b4035abd0c", "enabled": 1 } }, "ricci": { "100": { "checksum": "sha256:308e6f81ea6fe3a196db021ad12cb7baae8bdd19f212bdc1f8ab404c27019abe", "enabled": 1 } }, "rkhunter": { "100": { "checksum": "sha256:aef52847742df6eecd94fe50a9fd5021637088620a576daa6659b9783b9d8553", "enabled": 1 } }, "rkt": { "100": { "checksum": "sha256:99e9cda55e22a71ebb3d74c56051f69ae895dd8134b627dcafda4b0a925e9ae9", "enabled": 1 } }, "rlogin": { "100": { "checksum": "sha256:6d7c850c1ee0942bd60c30a8f112b82fb182a24bc594d3707bf7801c4b80d5ad", "enabled": 1 } }, "rngd": { "100": { "checksum": "sha256:8b5d8041e76b9fdbad0d45ad1a37975171e424e56718dc139a93063729905cd5", "enabled": 1 } }, "rolekit": { "100": { "checksum": "sha256:01ae038a225e72270a6acc6bc6cc0b36c3b09a10e68112da9ec1b9d91fb414d5", "enabled": 1 } }, "roundup": { "100": { "checksum": "sha256:3b74654cbf5033ee6ab8c2dbc22773af846c129879c2b7355bc99df7c293833c", "enabled": 1 } }, "rpc": { "100": { "checksum": "sha256:c97daf9137ca479db4a7315e77f4475158475e674a12e1f42fa97f9db6cb398e", "enabled": 1 } }, "rpcbind": { "100": { "checksum": "sha256:a369faf5cb76fd3dd29929a38cd6b3221e7f98cb3c57675cfeeef9736b041283", "enabled": 1 } }, "rpm": { "100": { "checksum": "sha256:ededd2ec5ee4506eab2315599bf43a3deb8ceb83686c97406722968f5e93d759", "enabled": 1 } }, "rrdcached": { "100": { "checksum": "sha256:d542bd71ac70b65fbe712194a3727e826ac414096230de7bc5c4a2aea037756f", "enabled": 1 } }, "rshd": { "100": { "checksum": "sha256:d87f4f7f764a6282dccdfba116b34296f94f62e44c8ac2b51ae6ae7850be63e2", "enabled": 1 } }, "rshim": { "100": { "checksum": "sha256:0ddb10f8d7a2bcd92cc2e68302467326c643e02a5623151c3168135a3ec290de", "enabled": 1 } }, "rssh": { "100": { "checksum": "sha256:d5958076535790d5bad592f7eb70977ac9437bc0a7f97b34e431b9f414973648", "enabled": 1 } }, "rsync": { "100": { "checksum": "sha256:a0a5f9fa55b3888c84c566cce656011bc1ad8dab2c4b700ea6bf2341f556d590", "enabled": 1 } }, "rtas": { "100": { "checksum": "sha256:341b47d041b0df0aeadce1cd1a3cfa195aa9c5569d8f998edcd0169c13017894", "enabled": 1 } }, "rtkit": { "100": { "checksum": "sha256:5fab287cedea124b92aecc21550dafa4218805485040915716c00486d9cf04ca", "enabled": 1 } }, "rwho": { "100": { "checksum": 
"sha256:75872a4c3a9922ba6935d078f68aab2b562705085e258aeb7dd2bfc3e087615c", "enabled": 1 } }, "samba": { "100": { "checksum": "sha256:a2ea92601c02a22ffb4551f8bbf53174f7c363e07ebe198e74dafe3ebedd82d3", "enabled": 1 } }, "sambagui": { "100": { "checksum": "sha256:8ead836404a7e8a8b68aabeee3c649c214df9699b45f6c784989b3fcdd4f9e1a", "enabled": 1 } }, "sandboxX": { "100": { "checksum": "sha256:0776fe05eb5bcea62b434f30b893f79c06c7a18f352de24ed2546817f566c429", "enabled": 1 } }, "sanlock": { "100": { "checksum": "sha256:28c6186dc1bc711d42a3d1d2ff051038f0dd4a0259544e52f68c61139efd3a4e", "enabled": 1 } }, "sap": { "100": { "checksum": "sha256:1ba912e3e1e441a6b3f7b88c5603ff8ae915efdee90a76ae34e41d8556d851b0", "enabled": 1 } }, "sasl": { "100": { "checksum": "sha256:61fd6af55a226605d0ad608c145c6650ccb29b31d7ccf50e32b95ec7686c53b3", "enabled": 1 } }, "sbd": { "100": { "checksum": "sha256:f4a521054bd52ace05da7d520aabd132df773acf3037e2f414d81fe27f9ef04a", "enabled": 1 } }, "sblim": { "100": { "checksum": "sha256:9f9bcb3b8190d76c1381443107531fc17c78be637320f00310e4784a7ebc5c3a", "enabled": 1 } }, "screen": { "100": { "checksum": "sha256:e0cbb8905423a2b910876e9ae63348c3abb6fde30522e4b2124b06818f35638f", "enabled": 1 } }, "secadm": { "100": { "checksum": "sha256:8ba05b99b8e53560434f3e089cf7b104a3545febf9e28fdf0f03960e4741d88a", "enabled": 1 } }, "sectoolm": { "100": { "checksum": "sha256:515aa85f67c92cfedced542c1222bd136f8fb51801166f807145141acf212736", "enabled": 1 } }, "selinuxutil": { "100": { "checksum": "sha256:428f92fd885af8659db055061639069b8be8ab2d2386beb612db65261aa12673", "enabled": 1 } }, "sendmail": { "100": { "checksum": "sha256:9ec599103477e82c5a8da5e47ae1056cc519ccb236e171f9acfcc748af47b679", "enabled": 1 } }, "sensord": { "100": { "checksum": "sha256:a422c0e74e5b98d753f582bc2adacd6be541580fdd4b226ccd9cd05ece2eba08", "enabled": 1 } }, "setrans": { "100": { "checksum": "sha256:96030787f55e1e8c4d76f22919ca8dcf17a16cd08de745aad5b7f740f14c1958", "enabled": 1 } }, "setroubleshoot": { "100": { "checksum": "sha256:20f38095965fcc5d0b454413085c7609588086942ae89a65d19d6e6e0a06a9ee", "enabled": 1 } }, "seunshare": { "100": { "checksum": "sha256:fc381f26f9cbc83f72d5063e4d028a5365401a202052012755fa49ea63a51a42", "enabled": 1 } }, "sge": { "100": { "checksum": "sha256:3ff3ca366bd0571039f42bf990acbe10aed992be87c89450536e2fdc0e31961a", "enabled": 1 } }, "shorewall": { "100": { "checksum": "sha256:19e4f50a756989bafc3d30aa2679b9730c5a297e1aa20f71425f024fe934c574", "enabled": 1 } }, "slocate": { "100": { "checksum": "sha256:15789a6c47c2eba6ef224a7f4464819b37ed76cc6d7efadc7b1be0f212c85046", "enabled": 1 } }, "slpd": { "100": { "checksum": "sha256:404e3e22459d7cb94c12408a3bc9d320f58eee24788ac11648318722d1bc0bee", "enabled": 1 } }, "smartmon": { "100": { "checksum": "sha256:628fc3630bfcc5437ffbe528c3c4c0d7a08130b7b01cb1a66cd630bf05eb8795", "enabled": 1 } }, "smokeping": { "100": { "checksum": "sha256:f05ecf227e69b096900cc9fcd863a6d5457d64d1c0c41b9b1fc9aac20d02160d", "enabled": 1 } }, "smoltclient": { "100": { "checksum": "sha256:92f62e04b6f14736d375aae3c22f2da5edec288a6997212d194e062501a7128b", "enabled": 1 } }, "smsd": { "100": { "checksum": "sha256:006443b6c33a37037fdc4dc689bbfc7695251a2766429859137555797652aa33", "enabled": 1 } }, "snapper": { "100": { "checksum": "sha256:2af2c59f061d181581c0ee972630cac466f74d873731de2aa4a27dd0b9fdad9b", "enabled": 1 } }, "snmp": { "100": { "checksum": "sha256:c9ef0cdfb822eba65e29c8bd6594ad8cf9bc5a7cdc3aeef553475c7127619d4b", "enabled": 1 } }, "snort": { "100": { "checksum": 
"sha256:4068e4127dc3f2252006ed676a6c27c3ee34df690139c8d5c55813ea30e7ceed", "enabled": 1 } }, "sosreport": { "100": { "checksum": "sha256:25fdb658f00c0a3bc753a69bfb58d2f054903e7000ad0c7788c3eb712d79bac6", "enabled": 1 } }, "soundserver": { "100": { "checksum": "sha256:a8a0fa6265d7b4b17243ff1fca6f0ba49135e12d0ded004bb7c515291f30641b", "enabled": 1 } }, "spamassassin": { "100": { "checksum": "sha256:6a40d1bd53affea088d732dfa1cd97b0dbb30d88a5667ccf25c148942e3153b5", "enabled": 1 } }, "speech-dispatcher": { "100": { "checksum": "sha256:f23e5facc957ee792b529612c165040e87df4a7b49dc09c7887d2c5e6e4d7e41", "enabled": 1 } }, "squid": { "100": { "checksum": "sha256:68b2c3f88c1457ed4474f6ebbc85329d444acf473b25b0c505d58ee338399176", "enabled": 1 } }, "ssh": { "100": { "checksum": "sha256:309d6aa526bdbffec6b49778a7d4f369cfad582b78e54e1a2bb734e0e555fd16", "enabled": 1 } }, "sslh": { "100": { "checksum": "sha256:008b343789eb4b2aef06e0eace24fb651fe60e8851b9f86bf5aa8b5e6eba8092", "enabled": 1 } }, "sssd": { "100": { "checksum": "sha256:3e002e9f28e23f909ff86d1fbd93f16f5fa23e4803988f99eab78fcb5ea968c2", "enabled": 1 } }, "staff": { "100": { "checksum": "sha256:aa67b8e3b4e6a9795ac9b1d96527d69bb4f202f78e89d42241b697cd2590637f", "enabled": 1 } }, "stalld": { "100": { "checksum": "sha256:bb467e55083903177f5b764521080eb672d20393c61f174a1797a4ed7634ff93", "enabled": 1 } }, "stapserver": { "100": { "checksum": "sha256:67d0f2920bcead63390df082d6e187d9317e6ac0330fbd2f40cc29b054845b16", "enabled": 1 } }, "stratisd": { "100": { "checksum": "sha256:7581441e9dd2586371712d1c173061e6e3c0085e8654c97940963b168a7ea3ef", "enabled": 1 } }, "stunnel": { "100": { "checksum": "sha256:b6dbad9b252aec8e5a939b3e08454119cbea8725c83e410c260d6a06a388cd8f", "enabled": 1 } }, "su": { "100": { "checksum": "sha256:857ff8acea720516985a8942d946c75b39ab4150b8983cdc0ba3ed99c82d1885", "enabled": 1 } }, "sudo": { "100": { "checksum": "sha256:9877c22302189668e7a948cfda4273f7455d6f2ecec0bb6e1f3ffb2a217dc9d0", "enabled": 1 } }, "svnserve": { "100": { "checksum": "sha256:bb2418014f2be4d6b143586490bba8f56ee56d0b6a938b795118fa204f829016", "enabled": 1 } }, "swift": { "100": { "checksum": "sha256:8ffa1ab3ff17d35c16b701936fb37a4a1f398c801c51310cc084cebc0acf4f7c", "enabled": 1 } }, "sysadm": { "100": { "checksum": "sha256:e35a582f0adf2097f4ed9f762ef61236019c35020c6e9d11087f8e05c31515ad", "enabled": 1 } }, "sysadm_secadm": { "100": { "checksum": "sha256:26c80707a4a08f665d249670d9329eda36af31bdbb084d49e72198221090b7b8", "enabled": 1 } }, "sysnetwork": { "100": { "checksum": "sha256:3d4405968a8c9095460ab7e66da67f4e1168eb7194d630559aa58b78bdb25135", "enabled": 1 } }, "sysstat": { "100": { "checksum": "sha256:b767e9a66c473f2e3bb06336f0119a220bf08ef4380333c16b19acd05fb40f6d", "enabled": 1 } }, "systemd": { "100": { "checksum": "sha256:21181339b1f50691a6e73a96e9000604788b3b41e7a143fe3f788d4f0f576c99", "enabled": 1 } }, "tangd": { "100": { "checksum": "sha256:8be8dbcf7392331a36d09547dc7a136f37b21e7eefacc7a849dd3ea29a8c2f1a", "enabled": 1 } }, "targetd": { "100": { "checksum": "sha256:61a7d4ed0bbad560b1507fb13a27cb80f0ba9177e188369ee0388e208ad9a86e", "enabled": 1 } }, "tcpd": { "100": { "checksum": "sha256:f90f0a080b7822408e608f8ace404c2b3a7cce145fafda16d16c39c90412dd7b", "enabled": 1 } }, "tcsd": { "100": { "checksum": "sha256:e876f3f8dc496189fa7344870ca42edfd421dda506dcaaeaac67881f1c4a9f13", "enabled": 1 } }, "telepathy": { "100": { "checksum": "sha256:28d44df10f515e1f265d7bbdf51a377351efaaf2831b6bcc1ced928a358436ac", "enabled": 1 } }, "telnet": { "100": { 
"checksum": "sha256:7da89bf5c9cf25955b4516e9fa763239b7bb23084aad6a04c1933fd50cf315e9", "enabled": 1 } }, "tftp": { "100": { "checksum": "sha256:05185332feaed95a99162df1b3dc2bfb37c879d514db78015c4c083ca57028fd", "enabled": 1 } }, "tgtd": { "100": { "checksum": "sha256:178828a7d7ff1c30a3df812905214bce9de2ca0744dbe4dfe01691deb01d629e", "enabled": 1 } }, "thin": { "100": { "checksum": "sha256:2a20802eaae04dc1cf81bd9149ebd4db8e88eec3388b2d50bf078826d24994f0", "enabled": 1 } }, "thumb": { "100": { "checksum": "sha256:dffaeb8dd84c75f302c5cab90c3e892c5f6e6efa26167a9f8efe949ac613f99d", "enabled": 1 } }, "timedatex": { "100": { "checksum": "sha256:f29a6896ff0b88c513fe7998390aae2ebe6c1d6f5785e10c6ca33c6411a8bfdf", "enabled": 1 } }, "tlp": { "100": { "checksum": "sha256:9fd7f4b68daa0c6c8597e029f0a269e89731c2169d3f5d296062ca682d4e3ebf", "enabled": 1 } }, "tmpreaper": { "100": { "checksum": "sha256:9c14537b3fc129bf5e108017c16a8b5175f1c8a50ccb9d578bedb620e0e32503", "enabled": 1 } }, "tomcat": { "100": { "checksum": "sha256:48d267290c8bb1bb35a48bede09286690dde4c2991be32256776137569f6c586", "enabled": 1 } }, "tor": { "100": { "checksum": "sha256:18a978369799a6144a4dd7e0cab365a6c200218e64b453a70dd501613de3e379", "enabled": 1 } }, "tuned": { "100": { "checksum": "sha256:6455edecd5a99a3f6a3d28887408b6e4a65532965072d0733ba271e66712345b", "enabled": 1 } }, "tvtime": { "100": { "checksum": "sha256:80d528807e7b8e942a801bdfab3fc402dbc09163536462ccd8a678dcfba4725c", "enabled": 1 } }, "udev": { "100": { "checksum": "sha256:7f90ada00012cf15d357503034f98494f56c9f1e6bb82249e4a2092fe0991be5", "enabled": 1 } }, "ulogd": { "100": { "checksum": "sha256:cd675dd586c7bf94418a5fdddc2f87502970ec19911356ec1d628b109a339dcf", "enabled": 1 } }, "uml": { "100": { "checksum": "sha256:612f53a60d4b66f641a379ce5f96d27fe6214edf138a579be295d4fcabf28b94", "enabled": 1 } }, "unconfined": { "100": { "checksum": "sha256:a1fbd41ce1ac921d18a97dbcb741ce9a1cdd7fe6867c676400c648f713603052", "enabled": 1 } }, "unconfineduser": { "100": { "checksum": "sha256:1f9ddc44dd992f32b0bc2788c6310c5c48536a06bf8a2855ec52999d1b13f1ac", "enabled": 1 } }, "unlabelednet": { "100": { "checksum": "sha256:8674a308bc39857ae30458a7ffe592c50fa6c61ed9efbda30016b9f5fcc55cb8", "enabled": 1 } }, "unprivuser": { "100": { "checksum": "sha256:6843c341541e72e6e070ecda04baf2244fbcd2c50e876e403ab218959f39e7da", "enabled": 1 } }, "updfstab": { "100": { "checksum": "sha256:81736986d7599acaab02dd9a6512d81ea7f59476edccbe71b3ef32da493f5cd2", "enabled": 1 } }, "usbmodules": { "100": { "checksum": "sha256:77c24ef9ae5b3e19e29054146e0b09b7fed37fb70aff8e9d53e85e262050d22a", "enabled": 1 } }, "usbmuxd": { "100": { "checksum": "sha256:025da4e2c2b5788f98226701462c27cefe41ce6abb617c699a29f5568678dae7", "enabled": 1 } }, "userdomain": { "100": { "checksum": "sha256:7762b92e8c8abf9e77a3cc23071aa6ce461d57fc4c629858f23195fb7a74d789", "enabled": 1 } }, "userhelper": { "100": { "checksum": "sha256:7bf185d5031851326d2d108c0d70d9653b1dc80d77f9d7f58d66250df509245f", "enabled": 1 } }, "usermanage": { "100": { "checksum": "sha256:f69db7e7153bd486110c500cb4817d274c159179afc4aa01eb1b86af39f01483", "enabled": 1 } }, "usernetctl": { "100": { "checksum": "sha256:8f0c31f0713ab3e13fcea14186a5b0e749791dd7de8967efe21b6b427d1e0378", "enabled": 1 } }, "uucp": { "100": { "checksum": "sha256:2eb5e03e9e4a0a294e97adcaa6e188881917f403561330d4c585f24c0e8801f0", "enabled": 1 } }, "uuidd": { "100": { "checksum": "sha256:2348ef1729d8e40d4287325736452cce1c24fcdf5af11df06d315e6a3ac6acf6", "enabled": 1 } }, "varnishd": { 
"100": { "checksum": "sha256:ecfe0c70380e1129186a81bc8778fe5904c88fd9c616ad69851241dd57d7fd78", "enabled": 1 } }, "vdagent": { "100": { "checksum": "sha256:8c8faf892551960dce6c61dbc2145ee637921a7ff62cc96328978d9946024ebd", "enabled": 1 } }, "vhostmd": { "100": { "checksum": "sha256:04f9af430360595438f29cdba50fe3ce96fbe3a59cd1396d168cf3be57469538", "enabled": 1 } }, "virt": { "100": { "checksum": "sha256:d94f6df278181c096f0e7a90cbee1dc1ab07bd37b3d3577997f40eca5c3df8fd", "enabled": 1 } }, "vlock": { "100": { "checksum": "sha256:e213b634c2093ebf894adad9287c670dd73e43c79538c1e11d6b0cff574fd23d", "enabled": 1 } }, "vmtools": { "100": { "checksum": "sha256:9d2650b10280e0ecbdbb20692515598049e290ebfb426eafa5c0b067a2d33bf1", "enabled": 1 } }, "vmware": { "100": { "checksum": "sha256:e1a096c9fa1aa6c4244c3cf8340c14a67ba60ade122a7bb5167604c4cdc2e341", "enabled": 1 } }, "vnstatd": { "100": { "checksum": "sha256:03ccc49fc408c718f89b93502f1a7073efc8d9f81d18bcb69cede46300340130", "enabled": 1 } }, "vpn": { "100": { "checksum": "sha256:71544fa054595557124ab7098947a966e33b98584eb1345f955e754be531038e", "enabled": 1 } }, "w3c": { "100": { "checksum": "sha256:57482e874ec657d4d2a5840a4de5524df24b14e974e1a19e42bffc12428a5eca", "enabled": 1 } }, "watchdog": { "100": { "checksum": "sha256:81379d3f5bd7462ef59a44e4e6bbc5d5261fc8633be7a7ed9da248b7ed47700a", "enabled": 1 } }, "wdmd": { "100": { "checksum": "sha256:f0cec2f5898981b4e2768802facb4eee17c42e46d8da58b20467d7fd5ba0ed3b", "enabled": 1 } }, "webadm": { "100": { "checksum": "sha256:994f8fb7a32079d30b68a1cc4c51fe6a10e425c6145a689e32ac0053a2ded7c4", "enabled": 1 } }, "webalizer": { "100": { "checksum": "sha256:ec56ec85299e7b46853f9d34abae5c56aba7244054e48ac40cb4cf6dee602dc1", "enabled": 1 } }, "wine": { "100": { "checksum": "sha256:2750d4c101cacb336bb717f3beba2e0967ce6d957609f647e6f92966536894c6", "enabled": 1 } }, "wireguard": { "100": { "checksum": "sha256:c039f714e791b72444890960412088848de91a23b909cab26386369f6fa55b67", "enabled": 1 } }, "wireshark": { "100": { "checksum": "sha256:60940e6f75a00c73fd15eea03eb69aa8151b3457020034910aa0a2b714cc2241", "enabled": 1 } }, "xen": { "100": { "checksum": "sha256:92dfa4c75176cfa571f1310629ba5befe920f6cbbec03144aa23a87a1e27e2f3", "enabled": 1 } }, "xguest": { "100": { "checksum": "sha256:1862f5d7cfb921ef8b6adf56afd19db431a8f1aca74a38f46a543b83c0d02ac7", "enabled": 1 } }, "xserver": { "100": { "checksum": "sha256:ce38e8a07ff5e1061cc51311f31a91233e02fd23d57ee21b9977c02ae6e361c1", "enabled": 1 } }, "zabbix": { "100": { "checksum": "sha256:bf751940e6d5acd7944addf8099e67e309b367b70a5ffba89a437301b7251619", "enabled": 1 } }, "zarafa": { "100": { "checksum": "sha256:cf760718fd967208648f32ae3528e9d42e7e2933487d3052bd5809474fc577ec", "enabled": 1 } }, "zebra": { "100": { "checksum": "sha256:bba64baddec0addb05ac038c6a89a5a0fab53e43d797d8353a8a209cd66ca4e2", "enabled": 1 } }, "zoneminder": { "100": { "checksum": "sha256:4e20f61fbbe4afebaa084066e9e4c445c6d7d36e1254642bef5315313333ad40", "enabled": 1 } }, "zosremote": { "100": { "checksum": "sha256:d0b7718e1270a44a288569b8a2f8c0a4931ea45a4c4e04b6264e0ff4f7752283", "enabled": 1 } } }, "selinux_priorities": true }, "changed": false } TASK [fedora.linux_system_roles.selinux : Load SELinux modules] **************** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:115 Saturday 12 October 2024 13:40:25 -0400 (0:00:03.396) 0:00:26.673 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "selinux_modules is 
defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on filesystem tree] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:128 Saturday 12 October 2024 13:40:25 -0400 (0:00:00.041) 0:00:26.715 ****** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.selinux : Restore SELinux labels on filesystem tree in check mode] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:136 Saturday 12 October 2024 13:40:25 -0400 (0:00:00.020) 0:00:26.735 ****** skipping: [managed-node3] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.nbde_server : Stat the tangd custom port systemd directory] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:14 Saturday 12 October 2024 13:40:25 -0400 (0:00:00.027) 0:00:26.763 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1728754805.2174883, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1728754800.5175111, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 658505922, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0775", "mtime": 1728754800.5175111, "nlink": 2, "path": "/etc/systemd/system/tangd.socket.d", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 28, "uid": 0, "version": "447156668", "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.nbde_server : Get a list of files in the tangd custom directory] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:19 Saturday 12 October 2024 13:40:26 -0400 (0:00:00.326) 0:00:27.090 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "nbde_server_port | int == 80", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Manage tangd custom port systemd directory] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:35 Saturday 12 October 2024 13:40:26 -0400 (0:00:00.040) 0:00:27.130 ****** ok: [managed-node3] => { "changed": false, "gid": 0, "group": "root", "mode": "0775", "owner": "root", "path": "/etc/systemd/system/tangd.socket.d", "secontext": "unconfined_u:object_r:systemd_unit_file_t:s0", "size": 28, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.nbde_server : Creates the file with the port entry that we want tangd to listen to] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:44 Saturday 12 October 2024 13:40:26 -0400 (0:00:00.377) 0:00:27.507 ****** changed: [managed-node3] => { "changed": true, "checksum": "cab519df8c21e60fd06ac780e2c7bd41ad441042", "dest": "/etc/systemd/system/tangd.socket.d/override.conf", "gid": 0, "group": "root", "md5sum": "fc727969e0bd264a9cc7f9c6bc56714c", "mode": "0644", "owner": "root", "secontext": 
"system_u:object_r:tangd_unit_file_t:s0", "size": 90, "src": "/root/.ansible/tmp/ansible-tmp-1728754826.5752347-11081-278147775395579/.source.conf", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.nbde_server : Set flag to to tell main that the port has changed] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:53 Saturday 12 October 2024 13:40:27 -0400 (0:00:00.672) 0:00:28.180 ****** ok: [managed-node3] => { "ansible_facts": { "__nbde_server_port_changed": true }, "changed": false } TASK [Ensure the desired port is added to firewalld] *************************** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:57 Saturday 12 October 2024 13:40:27 -0400 (0:00:00.042) 0:00:28.222 ****** included: fedora.linux_system_roles.firewall for managed-node3 TASK [fedora.linux_system_roles.firewall : Setup firewalld] ******************** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2 Saturday 12 October 2024 13:40:27 -0400 (0:00:00.118) 0:00:28.341 ****** included: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node3 TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2 Saturday 12 October 2024 13:40:27 -0400 (0:00:00.041) 0:00:28.382 ****** ok: [managed-node3] TASK [fedora.linux_system_roles.firewall : Check if system is ostree] ********** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10 Saturday 12 October 2024 13:40:27 -0400 (0:00:00.472) 0:00:28.855 ****** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15 Saturday 12 October 2024 13:40:28 -0400 (0:00:00.354) 0:00:29.210 ****** ok: [managed-node3] => { "ansible_facts": { "__firewall_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22 Saturday 12 October 2024 13:40:28 -0400 (0:00:00.079) 0:00:29.289 ****** ok: [managed-node3] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27 Saturday 12 October 2024 13:40:28 -0400 (0:00:00.356) 0:00:29.645 ****** ok: [managed-node3] => { "ansible_facts": { "__firewall_is_transactional": false }, "changed": false } TASK [fedora.linux_system_roles.firewall : Install firewalld] ****************** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31 Saturday 12 October 2024 13:40:28 -0400 (0:00:00.042) 0:00:29.687 ****** ok: [managed-node3] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: firewalld TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] *** task path: 
/tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:43 Saturday 12 October 2024 13:40:30 -0400 (0:00:01.571) 0:00:31.259 ****** skipping: [managed-node3] => { "false_condition": "__firewall_is_transactional | d(false)" } TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:48 Saturday 12 October 2024 13:40:30 -0400 (0:00:00.059) 0:00:31.319 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:53 Saturday 12 October 2024 13:40:30 -0400 (0:00:00.065) 0:00:31.385 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Collect service facts] ************** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5 Saturday 12 October 2024 13:40:30 -0400 (0:00:00.062) 0:00:31.447 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:9 Saturday 12 October 2024 13:40:30 -0400 (0:00:00.063) 0:00:31.511 ****** skipping: [managed-node3] => (item=nftables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "nftables", "skip_reason": "Conditional result was False" } skipping: [managed-node3] => (item=iptables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "iptables", "skip_reason": "Conditional result was False" } skipping: [managed-node3] => (item=ufw) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "ufw", "skip_reason": "Conditional result was False" } skipping: [managed-node3] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] *********** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22 Saturday 12 October 2024 13:40:30 -0400 (0:00:00.070) 0:00:31.582 ****** ok: [managed-node3] => { "changed": false, "name": "firewalld", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "dbus-broker.service sysinit.target polkit.service basic.target dbus.socket system.slice", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "network-pre.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": 
"infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "ip6tables.service shutdown.target ebtables.service ipset.service iptables.service", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DevicePolicy": "auto", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", 
"LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13964", "LimitNPROCSoft": "13964", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13964", "LimitSIGPENDINGSoft": "13964", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket sysinit.target system.slice", "Restart": "no", "RestartKillSignal": "15", "RestartUSec": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22342", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", 
"TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "disabled", "UtmpMode": "init", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28 Saturday 12 October 2024 13:40:31 -0400 (0:00:00.503) 0:00:32.086 ****** changed: [managed-node3] => { "changed": true, "enabled": true, "name": "firewalld", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "polkit.service dbus-broker.service basic.target sysinit.target system.slice dbus.socket", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "network-pre.target shutdown.target", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "iptables.service ip6tables.service ipset.service shutdown.target ebtables.service", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DevicePolicy": "auto", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; 
argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13964", "LimitNPROCSoft": "13964", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13964", "LimitSIGPENDINGSoft": "13964", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MountAPIVFS": "no", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket sysinit.target system.slice", "Restart": "no", "RestartKillSignal": "15", "RestartUSec": "100ms", 
"RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22342", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "disabled", "UtmpMode": "init", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:34 Saturday 12 October 2024 13:40:32 -0400 (0:00:01.348) 0:00:33.434 ****** ok: [managed-node3] => { "ansible_facts": { "__firewall_previous_replaced": false, "__firewall_python_cmd": "/usr/bin/python3.9", "__firewall_report_changed": true }, "changed": false } TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:43 Saturday 12 October 2024 13:40:32 -0400 (0:00:00.050) 0:00:33.485 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:55 Saturday 12 October 2024 13:40:32 -0400 (0:00:00.040) 0:00:33.526 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Configure firewall] ***************** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71 Saturday 12 October 2024 13:40:32 -0400 (0:00:00.037) 0:00:33.563 ****** changed: [managed-node3] => (item={'port': '7500/tcp', 'zone': 'public', 'state': 'enabled', 'immediate': True, 'permanent': True}) => { "__firewall_changed": true, "ansible_loop_var": "item", "changed": true, "item": { "immediate": true, "permanent": true, "port": "7500/tcp", "state": "enabled", "zone": "public" } } TASK 
[fedora.linux_system_roles.firewall : Gather firewall config information] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:120 Saturday 12 October 2024 13:40:33 -0400 (0:00:01.028) 0:00:34.592 ****** skipping: [managed-node3] => (item={'port': '7500/tcp', 'zone': 'public', 'state': 'enabled', 'immediate': True, 'permanent': True}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "'detailed' in fw[0]", "item": { "immediate": true, "permanent": true, "port": "7500/tcp", "state": "enabled", "zone": "public" }, "skip_reason": "Conditional result was False" } skipping: [managed-node3] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130 Saturday 12 October 2024 13:40:33 -0400 (0:00:00.057) 0:00:34.649 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "'detailed' in fw[0]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:139 Saturday 12 October 2024 13:40:33 -0400 (0:00:00.049) 0:00:34.699 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:144 Saturday 12 October 2024 13:40:33 -0400 (0:00:00.050) 0:00:34.749 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:153 Saturday 12 October 2024 13:40:33 -0400 (0:00:00.041) 0:00:34.790 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Calculate what has changed] ********* task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:163 Saturday 12 October 2024 13:40:33 -0400 (0:00:00.039) 0:00:34.830 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Show diffs] ************************* task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:169 Saturday 12 October 2024 13:40:33 -0400 (0:00:00.037) 0:00:34.867 ****** skipping: [managed-node3] => { "false_condition": "__firewall_previous_replaced | bool" } TASK [fedora.linux_system_roles.nbde_server : Reload the daemons so the new changes take effect] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:34 Saturday 12 October 2024 13:40:33 -0400 (0:00:00.045) 0:00:34.913 ****** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK 
[fedora.linux_system_roles.nbde_server : Ensure required services are enabled and at the right state] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:39 Saturday 12 October 2024 13:40:34 -0400 (0:00:00.654) 0:00:35.567 ****** changed: [managed-node3] => (item=tangd.socket) => { "ansible_loop_var": "item", "changed": true, "enabled": true, "item": "tangd.socket", "name": "tangd.socket", "state": "started", "status": { "Accept": "yes", "AccessSELinuxContext": "system_u:object_r:tangd_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2024-10-12 13:39:23 EDT", "ActiveEnterTimestampMonotonic": "368185276", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "system.slice sysinit.target systemd-journald.socket", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2024-10-12 13:39:23 EDT", "AssertTimestampMonotonic": "368174702", "Backlog": "4096", "Before": "shutdown.target sockets.target", "BindIPv6Only": "default", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "Broadcast": "no", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "4587000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "no", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2024-10-12 13:39:23 EDT", "ConditionTimestampMonotonic": "368174698", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": "/system.slice/tangd.socket", "ControlGroupId": "4137", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DeferAcceptUSec": "0", "Delegate": "no", "Description": "Tang Server socket", "DevicePolicy": "auto", "DirectoryMode": "0755", "Documentation": "\"man:tang(8)\"", "DropInPaths": "/etc/systemd/system/tangd.socket.d/override.conf /etc/systemd/system/tangd.socket.d/override2.conf", "DynamicUser": "no", "ExecStartPre": "{ path=/usr/bin/chown ; argv[]=/usr/bin/chown -R tang:tang /var/db/tang ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorName": "tangd.socket", "FinalKillSignal": "9", "FlushPending": "no", "FragmentPath": "/usr/lib/systemd/system/tangd.socket", "FreeBind": "no", "FreezerState": "running", "GID": "[not set]", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": 
"no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "IPTOS": "-1", "IPTTL": "-1", "Id": "tangd.socket", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2024-10-12 13:39:23 EDT", "InactiveExitTimestampMonotonic": "368180181", "InvocationID": "727b3d2906354e64a2e19a2b74a21d62", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeepAlive": "no", "KeepAliveIntervalUSec": "0", "KeepAliveProbes": "0", "KeepAliveTimeUSec": "0", "KeyringMode": "shared", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13964", "LimitNPROCSoft": "13964", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13964", "LimitSIGPENDINGSoft": "13964", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "Listen": "[::]:7500 (Stream)", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "Mark": "-1", "MaxConnections": "128", "MaxConnectionsPerSource": "0", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "0", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MessageQueueMaxMessages": "0", "MessageQueueMessageSize": "0", "MountAPIVFS": "no", "NAccepted": "0", "NConnections": "0", "NRefused": "0", "NUMAPolicy": "n/a", "Names": "tangd.socket", "NeedDaemonReload": "no", "Nice": "0", "NoDelay": "no", "NoNewPrivileges": "no", "NonBlocking": "no", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PassCredentials": "no", "PassPacketInfo": "no", "PassSecurity": "no", "Perpetual": "no", "PipeSize": "0", "Priority": "-1", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "ReceiveBuffer": "0", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemoveIPC": "no", "RemoveOnStop": "no", "Requires": "sysinit.target system.slice", "RestartKillSignal": "15", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "ReusePort": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "SameProcessGroup": "no", "SecureBits": "0", "SendBuffer": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", 
"Slice": "system.slice", "SocketMode": "0666", "SocketProtocol": "0", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Sat 2024-10-12 13:39:23 EDT", "StateChangeTimestampMonotonic": "368185276", "StateDirectoryMode": "0755", "StopWhenUnneeded": "no", "SubState": "listening", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "0", "TasksMax": "22342", "TimeoutCleanUSec": "infinity", "TimeoutUSec": "1min 30s", "TimerSlackNSec": "50000", "Timestamping": "off", "Transient": "no", "Transparent": "no", "TriggerLimitBurst": "200", "TriggerLimitIntervalUSec": "2s", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "sockets.target", "WatchdogSignal": "6", "Writable": "no" } } TASK [Check tangd socket dir] ************************************************** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml:58 Saturday 12 October 2024 13:40:35 -0400 (0:00:00.515) 0:00:36.083 ****** ok: [managed-node3] => { "changed": false, "failed_when_result": false, "stat": { "atime": 1728754831.482406, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1728754827.1104064, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 658505922, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0775", "mtime": 1728754827.1104064, "nlink": 2, "path": "/etc/systemd/system/tangd.socket.d", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 49, "uid": 0, "version": "447156668", "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [Check custom file] ******************************************************* task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml:64 Saturday 12 October 2024 13:40:35 -0400 (0:00:00.353) 0:00:36.437 ****** ok: [managed-node3] => { "changed": false, "failed_when_result": false, "stat": { "atime": 1728754806.8214805, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "05987691cc309e84627f31fa0d1680a3b3b2c4b2", "ctime": 1728754800.518511, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 662700227, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0664", "mtime": 1728754800.1615129, "nlink": 1, "path": "/etc/systemd/system/tangd.socket.d/override2.conf", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 28, "uid": 0, "version": "3714202560", "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": 
false, "xoth": false, "xusr": false } } TASK [Verify role reported no changes] ***************************************** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml:70 Saturday 12 October 2024 13:40:35 -0400 (0:00:00.355) 0:00:36.793 ****** ok: [managed-node3] => { "changed": false } MSG: All assertions passed TASK [Check for ansible_managed, fingerprint in generated files] *************** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml:74 Saturday 12 October 2024 13:40:35 -0400 (0:00:00.032) 0:00:36.825 ****** included: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tasks/check_header.yml for managed-node3 TASK [Get file] **************************************************************** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tasks/check_header.yml:3 Saturday 12 October 2024 13:40:35 -0400 (0:00:00.040) 0:00:36.865 ****** ok: [managed-node3] => { "changed": false, "content": "IwojIEFuc2libGUgbWFuYWdlZAojCiMgc3lzdGVtX3JvbGU6bmJkZV9zZXJ2ZXIKCltTb2NrZXRdCkxpc3RlblN0cmVhbT0KTGlzdGVuU3RyZWFtPTc1MDAK", "encoding": "base64", "source": "/etc/systemd/system/tangd.socket.d/override.conf" } TASK [Check for presence of ansible managed header, fingerprint] *************** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tasks/check_header.yml:9 Saturday 12 October 2024 13:40:36 -0400 (0:00:00.414) 0:00:37.279 ****** ok: [managed-node3] => { "changed": false } MSG: All assertions passed TASK [Remove custom file] ****************************************************** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml:80 Saturday 12 October 2024 13:40:36 -0400 (0:00:00.059) 0:00:37.339 ****** changed: [managed-node3] => { "changed": true, "path": "/etc/systemd/system/tangd.socket.d/override2.conf", "state": "absent" } TASK [Run the role with default port] ****************************************** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml:85 Saturday 12 October 2024 13:40:36 -0400 (0:00:00.337) 0:00:37.677 ****** included: fedora.linux_system_roles.nbde_server for managed-node3 TASK [fedora.linux_system_roles.nbde_server : Set version specific variables] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main.yml:6 Saturday 12 October 2024 13:40:36 -0400 (0:00:00.039) 0:00:37.716 ****** included: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml for managed-node3 TASK [fedora.linux_system_roles.nbde_server : Ensure ansible_facts used by role] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:2 Saturday 12 October 2024 13:40:36 -0400 (0:00:00.033) 0:00:37.750 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "__nbde_server_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Check if system is ostree] ******* task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:10 Saturday 12 October 2024 
13:40:36 -0400 (0:00:00.045) 0:00:37.795 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __nbde_server_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Set flag to indicate system is ostree] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:15 Saturday 12 October 2024 13:40:36 -0400 (0:00:00.025) 0:00:37.821 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "not __nbde_server_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Set platform/version specific variables] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:19 Saturday 12 October 2024 13:40:36 -0400 (0:00:00.024) 0:00:37.845 ****** ok: [managed-node3] => { "ansible_facts": { "__nbde_server_cachedir": "/var/cache/tang", "__nbde_server_group": "tang", "__nbde_server_keydir": "/var/db/tang", "__nbde_server_keygen": "/usr/libexec/tangd-keygen", "__nbde_server_packages": [ "tang" ], "__nbde_server_services": [ "tangd.socket" ], "__nbde_server_update": "/usr/libexec/tangd-update", "__nbde_server_user": "tang" }, "ansible_included_var_files": [ "/tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/vars/default.yml" ], "changed": false } TASK [fedora.linux_system_roles.nbde_server : Include the appropriate provider tasks] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main.yml:9 Saturday 12 October 2024 13:40:36 -0400 (0:00:00.037) 0:00:37.883 ****** included: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml for managed-node3 TASK [fedora.linux_system_roles.nbde_server : Ensure tang is installed] ******** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:2 Saturday 12 October 2024 13:40:36 -0400 (0:00:00.034) 0:00:37.918 ****** ok: [managed-node3] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: tang TASK [fedora.linux_system_roles.nbde_server : Ensure keys are rotated] ********* task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:8 Saturday 12 October 2024 13:40:38 -0400 (0:00:01.552) 0:00:39.471 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "nbde_server_rotate_keys | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Ensure we have keys] ************* task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:17 Saturday 12 October 2024 13:40:38 -0400 (0:00:00.037) 0:00:39.508 ****** ok: [managed-node3] => { "arguments": { "cachedir": "/var/cache/tang", "force": false, "keydir": "/var/db/tang", "keygen": "/usr/libexec/tangd-keygen", "keys_to_deploy_dir": null, "state": "keys-created", "update": "/usr/libexec/tangd-update" }, "changed": false, "state": "keys-created" } TASK [fedora.linux_system_roles.nbde_server : Perform key management (fetch/deploy) tasks] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:26 Saturday 12 October 2024 13:40:38 -0400 (0:00:00.329) 0:00:39.838 ****** skipping: [managed-node3] => { 
"changed": false, "false_condition": "(nbde_server_fetch_keys | bool) or (nbde_server_deploy_keys | bool)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Manage firewall and SELinux for port] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:30 Saturday 12 October 2024 13:40:38 -0400 (0:00:00.038) 0:00:39.876 ****** included: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml for managed-node3 TASK [Ensure tang port is labeled tangd_port_t for SELinux] ******************** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:2 Saturday 12 October 2024 13:40:38 -0400 (0:00:00.034) 0:00:39.911 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "nbde_server_manage_selinux | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Stat the tangd custom port systemd directory] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:14 Saturday 12 October 2024 13:40:38 -0400 (0:00:00.037) 0:00:39.949 ****** ok: [managed-node3] => { "changed": false, "stat": { "atime": 1728754831.482406, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1728754836.6164052, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 658505922, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0775", "mtime": 1728754836.6164052, "nlink": 2, "path": "/etc/systemd/system/tangd.socket.d", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 27, "uid": 0, "version": "447156668", "wgrp": true, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.nbde_server : Get a list of files in the tangd custom directory] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:19 Saturday 12 October 2024 13:40:39 -0400 (0:00:00.354) 0:00:40.303 ****** ok: [managed-node3] => { "changed": false, "examined": 1, "files": [], "matched": 0, "skipped_paths": {} } MSG: All paths examined TASK [fedora.linux_system_roles.nbde_server : Manage tangd custom port systemd directory] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:35 Saturday 12 October 2024 13:40:39 -0400 (0:00:00.351) 0:00:40.654 ****** changed: [managed-node3] => { "changed": true, "path": "/etc/systemd/system/tangd.socket.d", "state": "absent" } TASK [fedora.linux_system_roles.nbde_server : Creates the file with the port entry that we want tangd to listen to] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:44 Saturday 12 October 2024 13:40:40 -0400 (0:00:00.430) 0:00:41.085 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "nbde_server_port | int != 80", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Set flag to to tell main that the port has changed] *** task path: 
/tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:53 Saturday 12 October 2024 13:40:40 -0400 (0:00:00.055) 0:00:41.140 ****** ok: [managed-node3] => { "ansible_facts": { "__nbde_server_port_changed": true }, "changed": false } TASK [Ensure the desired port is added to firewalld] *************************** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:57 Saturday 12 October 2024 13:40:40 -0400 (0:00:00.033) 0:00:41.173 ****** skipping: [managed-node3] => { "changed": false, "false_condition": "nbde_server_manage_firewall | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.nbde_server : Reload the daemons so the new changes take effect] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:34 Saturday 12 October 2024 13:40:40 -0400 (0:00:00.040) 0:00:41.213 ****** ok: [managed-node3] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.nbde_server : Ensure required services are enabled and at the right state] *** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:39 Saturday 12 October 2024 13:40:40 -0400 (0:00:00.644) 0:00:41.858 ****** changed: [managed-node3] => (item=tangd.socket) => { "ansible_loop_var": "item", "changed": true, "enabled": true, "item": "tangd.socket", "name": "tangd.socket", "state": "started", "status": { "Accept": "yes", "AccessSELinuxContext": "system_u:object_r:tangd_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2024-10-12 13:40:35 EDT", "ActiveEnterTimestampMonotonic": "440007655", "ActiveExitTimestamp": "Sat 2024-10-12 13:40:34 EDT", "ActiveExitTimestampMonotonic": "439992577", "ActiveState": "active", "After": "sysinit.target systemd-journald.socket system.slice", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2024-10-12 13:40:34 EDT", "AssertTimestampMonotonic": "439993043", "Backlog": "4096", "Before": "sockets.target shutdown.target", "BindIPv6Only": "default", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "Broadcast": "no", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "5561000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "no", "CanIsolate": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2024-10-12 13:40:34 EDT", "ConditionTimestampMonotonic": "439993038", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": 
"/system.slice/tangd.socket", "ControlGroupId": "4455", "ControlPID": "0", "CoredumpFilter": "0x33", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DeferAcceptUSec": "0", "Delegate": "no", "Description": "Tang Server socket", "DevicePolicy": "auto", "DirectoryMode": "0755", "Documentation": "\"man:tang(8)\"", "DynamicUser": "no", "ExecStartPre": "{ path=/usr/bin/chown ; argv[]=/usr/bin/chown -R tang:tang /var/db/tang ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "FailureAction": "none", "FileDescriptorName": "tangd.socket", "FinalKillSignal": "9", "FlushPending": "no", "FragmentPath": "/usr/lib/systemd/system/tangd.socket", "FreeBind": "no", "FreezerState": "running", "GID": "[not set]", "IOAccounting": "no", "IOReadBytes": "18446744073709551615", "IOReadOperations": "18446744073709551615", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "18446744073709551615", "IOWriteOperations": "18446744073709551615", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "IPTOS": "-1", "IPTTL": "-1", "Id": "tangd.socket", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Sat 2024-10-12 13:40:34 EDT", "InactiveEnterTimestampMonotonic": "439992577", "InactiveExitTimestamp": "Sat 2024-10-12 13:40:34 EDT", "InactiveExitTimestampMonotonic": "440001424", "InvocationID": "a9dfee15398343bd95396c1869b890e6", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeepAlive": "no", "KeepAliveIntervalUSec": "0", "KeepAliveProbes": "0", "KeepAliveTimeUSec": "0", "KeyringMode": "shared", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13964", "LimitNPROCSoft": "13964", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13964", "LimitSIGPENDINGSoft": "13964", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "Listen": "[::]:80 (Stream)", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "Mark": "-1", "MaxConnections": "64", "MaxConnectionsPerSource": "0", "MemoryAccounting": "yes", "MemoryAvailable": "infinity", "MemoryCurrent": "0", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemorySwapMax": "infinity", "MessageQueueMaxMessages": "0", "MessageQueueMessageSize": "0", "MountAPIVFS": "no", "NAccepted": "0", "NConnections": "0", "NRefused": "0", "NUMAPolicy": "n/a", "Names": "tangd.socket", "NeedDaemonReload": "no", "Nice": "0", 
"NoDelay": "no", "NoNewPrivileges": "no", "NonBlocking": "no", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PassCredentials": "no", "PassPacketInfo": "no", "PassSecurity": "no", "Perpetual": "no", "PipeSize": "0", "Priority": "-1", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivateTmp": "no", "PrivateUsers": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "ReceiveBuffer": "0", "RefuseManualStart": "no", "RefuseManualStop": "no", "RemoveIPC": "no", "RemoveOnStop": "no", "Requires": "system.slice sysinit.target", "RestartKillSignal": "15", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "ReusePort": "no", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "SameProcessGroup": "no", "SecureBits": "0", "SendBuffer": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "Slice": "system.slice", "SocketMode": "0666", "SocketProtocol": "0", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StateChangeTimestamp": "Sat 2024-10-12 13:40:35 EDT", "StateChangeTimestampMonotonic": "440007655", "StateDirectoryMode": "0755", "StopWhenUnneeded": "no", "SubState": "listening", "SuccessAction": "none", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "0", "TasksMax": "22342", "TimeoutCleanUSec": "infinity", "TimeoutUSec": "1min 30s", "TimerSlackNSec": "50000", "Timestamping": "off", "Transient": "no", "Transparent": "no", "TriggerLimitBurst": "200", "TriggerLimitIntervalUSec": "2s", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "sockets.target", "WatchdogSignal": "6", "Writable": "no" } } TASK [Check tangd socket dir is absent] **************************************** task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml:89 Saturday 12 October 2024 13:40:41 -0400 (0:00:00.503) 0:00:42.361 ****** ok: [managed-node3] => { "changed": false, "failed_when_result": false, "stat": { "exists": false } } TASK [Debug] ******************************************************************* task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml:96 Saturday 12 October 2024 13:40:41 -0400 (0:00:00.318) 0:00:42.680 ****** ok: [managed-node3] => { "changed": false, "cmd": [ "journalctl", "-ex" ], "delta": "0:00:00.035805", "end": "2024-10-12 13:40:42.063782", "rc": 0, "start": "2024-10-12 13:40:42.027977" } STDOUT: Oct 12 13:33:17 localhost systemd[1]: Reached target Remote File Systems. ░░ Subject: A start job for unit remote-fs.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit remote-fs.target has finished successfully. 
░░ ░░ The job identifier is 40. Oct 12 13:33:17 localhost systemd[1]: Starting dracut pre-mount hook... ░░ Subject: A start job for unit dracut-pre-mount.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-pre-mount.service has begun execution. ░░ ░░ The job identifier is 53. Oct 12 13:33:17 localhost systemd[1]: Finished dracut pre-mount hook. ░░ Subject: A start job for unit dracut-pre-mount.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-pre-mount.service has finished successfully. ░░ ░░ The job identifier is 53. Oct 12 13:33:17 localhost systemd[1]: Starting File System Check on /dev/disk/by-uuid/efa2924f-8850-491c-a9bf-997f8385d98d... ░░ Subject: A start job for unit systemd-fsck-root.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-fsck-root.service has begun execution. ░░ ░░ The job identifier is 37. Oct 12 13:33:17 localhost systemd-fsck[388]: /usr/sbin/fsck.xfs: XFS file system. Oct 12 13:33:17 localhost systemd[1]: Finished File System Check on /dev/disk/by-uuid/efa2924f-8850-491c-a9bf-997f8385d98d. ░░ Subject: A start job for unit systemd-fsck-root.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-fsck-root.service has finished successfully. ░░ ░░ The job identifier is 37. Oct 12 13:33:17 localhost systemd[1]: Mounting /sysroot... ░░ Subject: A start job for unit sysroot.mount has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sysroot.mount has begun execution. ░░ ░░ The job identifier is 36. Oct 12 13:33:17 localhost kernel: SGI XFS with ACLs, security attributes, scrub, quota, no debug enabled Oct 12 13:33:17 localhost kernel: XFS (xvda1): Mounting V5 Filesystem efa2924f-8850-491c-a9bf-997f8385d98d Oct 12 13:33:18 localhost kernel: XFS (xvda1): Ending clean mount Oct 12 13:33:18 localhost systemd[1]: Mounted /sysroot. ░░ Subject: A start job for unit sysroot.mount has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sysroot.mount has finished successfully. ░░ ░░ The job identifier is 36. Oct 12 13:33:18 localhost systemd[1]: Reached target Initrd Root File System. ░░ Subject: A start job for unit initrd-root-fs.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd-root-fs.target has finished successfully. ░░ ░░ The job identifier is 35. Oct 12 13:33:18 localhost systemd[1]: Starting Mountpoints Configured in the Real Root... ░░ Subject: A start job for unit initrd-parse-etc.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd-parse-etc.service has begun execution. ░░ ░░ The job identifier is 34. Oct 12 13:33:19 localhost systemd[1]: initrd-parse-etc.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit initrd-parse-etc.service has successfully entered the 'dead' state. Oct 12 13:33:19 localhost systemd[1]: Finished Mountpoints Configured in the Real Root. 
░░ Subject: A start job for unit initrd-parse-etc.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd-parse-etc.service has finished successfully. ░░ ░░ The job identifier is 34. Oct 12 13:33:19 localhost systemd[1]: Reached target Initrd File Systems. ░░ Subject: A start job for unit initrd-fs.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd-fs.target has finished successfully. ░░ ░░ The job identifier is 43. Oct 12 13:33:19 localhost systemd[1]: Reached target Initrd Default Target. ░░ Subject: A start job for unit initrd.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd.target has finished successfully. ░░ ░░ The job identifier is 1. Oct 12 13:33:19 localhost systemd[1]: dracut mount hook was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit dracut-mount.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-mount.service has finished successfully. ░░ ░░ The job identifier is 47. Oct 12 13:33:19 localhost systemd[1]: Starting dracut pre-pivot and cleanup hook... ░░ Subject: A start job for unit dracut-pre-pivot.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-pre-pivot.service has begun execution. ░░ ░░ The job identifier is 39. Oct 12 13:33:19 localhost systemd[1]: Finished dracut pre-pivot and cleanup hook. ░░ Subject: A start job for unit dracut-pre-pivot.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-pre-pivot.service has finished successfully. ░░ ░░ The job identifier is 39. Oct 12 13:33:19 localhost systemd[1]: Starting Cleaning Up and Shutting Down Daemons... ░░ Subject: A start job for unit initrd-cleanup.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd-cleanup.service has begun execution. ░░ ░░ The job identifier is 56. Oct 12 13:33:19 localhost systemd[1]: Stopped target Network. ░░ Subject: A stop job for unit network.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit network.target has finished. ░░ ░░ The job identifier is 100 and the job result is done. Oct 12 13:33:19 localhost systemd[1]: Stopped target Timer Units. ░░ Subject: A stop job for unit timers.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit timers.target has finished. ░░ ░░ The job identifier is 99 and the job result is done. Oct 12 13:33:19 localhost systemd[1]: dbus.socket: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit dbus.socket has successfully entered the 'dead' state. Oct 12 13:33:19 localhost systemd[1]: Closed D-Bus System Message Bus Socket. ░░ Subject: A stop job for unit dbus.socket has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit dbus.socket has finished. ░░ ░░ The job identifier is 88 and the job result is done. Oct 12 13:33:19 localhost systemd[1]: dracut-pre-pivot.service: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit dracut-pre-pivot.service has successfully entered the 'dead' state. Oct 12 13:33:19 localhost systemd[1]: Stopped dracut pre-pivot and cleanup hook. ░░ Subject: A stop job for unit dracut-pre-pivot.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit dracut-pre-pivot.service has finished. ░░ ░░ The job identifier is 104 and the job result is done. Oct 12 13:33:19 localhost systemd[1]: Stopped target Initrd Default Target. ░░ Subject: A stop job for unit initrd.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit initrd.target has finished. ░░ ░░ The job identifier is 87 and the job result is done. Oct 12 13:33:19 localhost systemd[1]: Stopped target Basic System. ░░ Subject: A stop job for unit basic.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit basic.target has finished. ░░ ░░ The job identifier is 106 and the job result is done. Oct 12 13:33:19 localhost systemd[1]: Stopped target Initrd Root Device. ░░ Subject: A stop job for unit initrd-root-device.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit initrd-root-device.target has finished. ░░ ░░ The job identifier is 92 and the job result is done. Oct 12 13:33:19 localhost systemd[1]: Stopped target Initrd /usr File System. ░░ Subject: A stop job for unit initrd-usr-fs.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit initrd-usr-fs.target has finished. ░░ ░░ The job identifier is 101 and the job result is done. Oct 12 13:33:19 localhost systemd[1]: Stopped target Path Units. ░░ Subject: A stop job for unit paths.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit paths.target has finished. ░░ ░░ The job identifier is 102 and the job result is done. Oct 12 13:33:19 localhost systemd[1]: systemd-ask-password-console.path: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-ask-password-console.path has successfully entered the 'dead' state. Oct 12 13:33:19 localhost systemd[1]: Stopped Dispatch Password Requests to Console Directory Watch. ░░ Subject: A stop job for unit systemd-ask-password-console.path has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-ask-password-console.path has finished. ░░ ░░ The job identifier is 111 and the job result is done. Oct 12 13:33:19 localhost systemd[1]: Stopped target Remote File Systems. ░░ Subject: A stop job for unit remote-fs.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit remote-fs.target has finished. ░░ ░░ The job identifier is 97 and the job result is done. Oct 12 13:33:19 localhost systemd[1]: Stopped target Preparation for Remote File Systems. ░░ Subject: A stop job for unit remote-fs-pre.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit remote-fs-pre.target has finished. ░░ ░░ The job identifier is 112 and the job result is done. Oct 12 13:33:19 localhost systemd[1]: Stopped target Slice Units. 
░░ Subject: A stop job for unit slices.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit slices.target has finished. ░░ ░░ The job identifier is 96 and the job result is done. Oct 12 13:33:19 localhost systemd[1]: Stopped target Socket Units. ░░ Subject: A stop job for unit sockets.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit sockets.target has finished. ░░ ░░ The job identifier is 103 and the job result is done. Oct 12 13:33:19 localhost systemd[1]: Stopped target System Initialization. ░░ Subject: A stop job for unit sysinit.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit sysinit.target has finished. ░░ ░░ The job identifier is 105 and the job result is done. Oct 12 13:33:19 localhost systemd[1]: Stopped target Local File Systems. ░░ Subject: A stop job for unit local-fs.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit local-fs.target has finished. ░░ ░░ The job identifier is 98 and the job result is done. Oct 12 13:33:19 localhost systemd[1]: Stopped target Swaps. ░░ Subject: A stop job for unit swap.target has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit swap.target has finished. ░░ ░░ The job identifier is 109 and the job result is done. Oct 12 13:33:19 localhost systemd[1]: dracut-pre-mount.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit dracut-pre-mount.service has successfully entered the 'dead' state. Oct 12 13:33:19 localhost systemd[1]: Stopped dracut pre-mount hook. ░░ Subject: A stop job for unit dracut-pre-mount.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit dracut-pre-mount.service has finished. ░░ ░░ The job identifier is 108 and the job result is done. Oct 12 13:33:19 localhost systemd[1]: dracut-initqueue.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit dracut-initqueue.service has successfully entered the 'dead' state. Oct 12 13:33:19 localhost systemd[1]: Stopped dracut initqueue hook. ░░ Subject: A stop job for unit dracut-initqueue.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit dracut-initqueue.service has finished. ░░ ░░ The job identifier is 93 and the job result is done. Oct 12 13:33:19 localhost systemd[1]: systemd-sysctl.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-sysctl.service has successfully entered the 'dead' state. Oct 12 13:33:19 localhost systemd[1]: Stopped Apply Kernel Variables. ░░ Subject: A stop job for unit systemd-sysctl.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-sysctl.service has finished. ░░ ░░ The job identifier is 114 and the job result is done. Oct 12 13:33:19 localhost systemd[1]: systemd-tmpfiles-setup.service: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-tmpfiles-setup.service has successfully entered the 'dead' state. Oct 12 13:33:19 localhost systemd[1]: Stopped Create Volatile Files and Directories. ░░ Subject: A stop job for unit systemd-tmpfiles-setup.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-tmpfiles-setup.service has finished. ░░ ░░ The job identifier is 84 and the job result is done. Oct 12 13:33:19 localhost systemd[1]: systemd-udev-trigger.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-udev-trigger.service has successfully entered the 'dead' state. Oct 12 13:33:19 localhost systemd[1]: Stopped Coldplug All udev Devices. ░░ Subject: A stop job for unit systemd-udev-trigger.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-udev-trigger.service has finished. ░░ ░░ The job identifier is 68 and the job result is done. Oct 12 13:33:19 localhost systemd[1]: Stopping Rule-based Manager for Device Events and Files... ░░ Subject: A stop job for unit systemd-udevd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-udevd.service has begun execution. ░░ ░░ The job identifier is 66. Oct 12 13:33:19 localhost systemd[1]: systemd-vconsole-setup.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-vconsole-setup.service has successfully entered the 'dead' state. Oct 12 13:33:19 localhost systemd[1]: Stopped Setup Virtual Console. ░░ Subject: A stop job for unit systemd-vconsole-setup.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-vconsole-setup.service has finished. ░░ ░░ The job identifier is 94 and the job result is done. Oct 12 13:33:19 localhost systemd[1]: run-credentials-systemd\x2dtmpfiles\x2dsetup.service.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-credentials-systemd\x2dtmpfiles\x2dsetup.service.mount has successfully entered the 'dead' state. Oct 12 13:33:19 localhost systemd[1]: run-credentials-systemd\x2dsysctl.service.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-credentials-systemd\x2dsysctl.service.mount has successfully entered the 'dead' state. Oct 12 13:33:19 localhost systemd[1]: initrd-cleanup.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit initrd-cleanup.service has successfully entered the 'dead' state. Oct 12 13:33:19 localhost systemd[1]: Finished Cleaning Up and Shutting Down Daemons. ░░ Subject: A start job for unit initrd-cleanup.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd-cleanup.service has finished successfully. ░░ ░░ The job identifier is 56. Oct 12 13:33:19 localhost systemd[1]: systemd-udevd.service: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-udevd.service has successfully entered the 'dead' state. Oct 12 13:33:19 localhost systemd[1]: Stopped Rule-based Manager for Device Events and Files. ░░ Subject: A stop job for unit systemd-udevd.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-udevd.service has finished. ░░ ░░ The job identifier is 66 and the job result is done. Oct 12 13:33:19 localhost systemd[1]: systemd-udevd-control.socket: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-udevd-control.socket has successfully entered the 'dead' state. Oct 12 13:33:19 localhost systemd[1]: Closed udev Control Socket. ░░ Subject: A stop job for unit systemd-udevd-control.socket has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-udevd-control.socket has finished. ░░ ░░ The job identifier is 65 and the job result is done. Oct 12 13:33:19 localhost systemd[1]: systemd-udevd-kernel.socket: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-udevd-kernel.socket has successfully entered the 'dead' state. Oct 12 13:33:19 localhost systemd[1]: Closed udev Kernel Socket. ░░ Subject: A stop job for unit systemd-udevd-kernel.socket has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-udevd-kernel.socket has finished. ░░ ░░ The job identifier is 67 and the job result is done. Oct 12 13:33:19 localhost systemd[1]: dracut-pre-udev.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit dracut-pre-udev.service has successfully entered the 'dead' state. Oct 12 13:33:19 localhost systemd[1]: Stopped dracut pre-udev hook. ░░ Subject: A stop job for unit dracut-pre-udev.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit dracut-pre-udev.service has finished. ░░ ░░ The job identifier is 110 and the job result is done. Oct 12 13:33:19 localhost systemd[1]: dracut-cmdline.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit dracut-cmdline.service has successfully entered the 'dead' state. Oct 12 13:33:19 localhost systemd[1]: Stopped dracut cmdline hook. ░░ Subject: A stop job for unit dracut-cmdline.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit dracut-cmdline.service has finished. ░░ ░░ The job identifier is 95 and the job result is done. Oct 12 13:33:19 localhost systemd[1]: Starting Cleanup udev Database... ░░ Subject: A start job for unit initrd-udevadm-cleanup-db.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd-udevadm-cleanup-db.service has begun execution. ░░ ░░ The job identifier is 63. Oct 12 13:33:19 localhost systemd[1]: systemd-tmpfiles-setup-dev.service: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-tmpfiles-setup-dev.service has successfully entered the 'dead' state. Oct 12 13:33:19 localhost systemd[1]: Stopped Create Static Device Nodes in /dev. ░░ Subject: A stop job for unit systemd-tmpfiles-setup-dev.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-tmpfiles-setup-dev.service has finished. ░░ ░░ The job identifier is 85 and the job result is done. Oct 12 13:33:19 localhost systemd[1]: kmod-static-nodes.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit kmod-static-nodes.service has successfully entered the 'dead' state. Oct 12 13:33:19 localhost systemd[1]: Stopped Create List of Static Device Nodes. ░░ Subject: A stop job for unit kmod-static-nodes.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit kmod-static-nodes.service has finished. ░░ ░░ The job identifier is 113 and the job result is done. Oct 12 13:33:19 localhost systemd[1]: systemd-sysusers.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-sysusers.service has successfully entered the 'dead' state. Oct 12 13:33:19 localhost systemd[1]: Stopped Create System Users. ░░ Subject: A stop job for unit systemd-sysusers.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit systemd-sysusers.service has finished. ░░ ░░ The job identifier is 83 and the job result is done. Oct 12 13:33:19 localhost systemd[1]: run-credentials-systemd\x2dtmpfiles\x2dsetup\x2ddev.service.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-credentials-systemd\x2dtmpfiles\x2dsetup\x2ddev.service.mount has successfully entered the 'dead' state. Oct 12 13:33:19 localhost systemd[1]: run-credentials-systemd\x2dsysusers.service.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-credentials-systemd\x2dsysusers.service.mount has successfully entered the 'dead' state. Oct 12 13:33:19 localhost systemd[1]: initrd-udevadm-cleanup-db.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit initrd-udevadm-cleanup-db.service has successfully entered the 'dead' state. Oct 12 13:33:19 localhost systemd[1]: Finished Cleanup udev Database. ░░ Subject: A start job for unit initrd-udevadm-cleanup-db.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd-udevadm-cleanup-db.service has finished successfully. ░░ ░░ The job identifier is 63. Oct 12 13:33:19 localhost systemd[1]: Reached target Switch Root. ░░ Subject: A start job for unit initrd-switch-root.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd-switch-root.target has finished successfully. ░░ ░░ The job identifier is 59. Oct 12 13:33:19 localhost systemd[1]: Starting Switch Root... 
░░ Subject: A start job for unit initrd-switch-root.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit initrd-switch-root.service has begun execution. ░░ ░░ The job identifier is 60. Oct 12 13:33:19 localhost systemd[1]: Switching root. Oct 12 13:33:19 localhost systemd-journald[231]: Journal stopped ░░ Subject: The journal has been stopped ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The system journal process has shut down and closed all currently ░░ active journal files. Oct 12 13:33:33 localhost systemd-journald[231]: Received SIGTERM from PID 1 (systemd). Oct 12 13:33:33 localhost kernel: audit: type=1404 audit(1728754404.363:2): enforcing=1 old_enforcing=0 auid=4294967295 ses=4294967295 enabled=1 old-enabled=1 lsm=selinux res=1 Oct 12 13:33:33 localhost kernel: SELinux: policy capability network_peer_controls=1 Oct 12 13:33:33 localhost kernel: SELinux: policy capability open_perms=1 Oct 12 13:33:33 localhost kernel: SELinux: policy capability extended_socket_class=1 Oct 12 13:33:33 localhost kernel: SELinux: policy capability always_check_network=0 Oct 12 13:33:33 localhost kernel: SELinux: policy capability cgroup_seclabel=1 Oct 12 13:33:33 localhost kernel: SELinux: policy capability nnp_nosuid_transition=1 Oct 12 13:33:33 localhost kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Oct 12 13:33:33 localhost kernel: audit: type=1403 audit(1728754405.417:3): auid=4294967295 ses=4294967295 lsm=selinux res=1 Oct 12 13:33:33 localhost systemd[1]: Successfully loaded SELinux policy in 1.131687s. Oct 12 13:33:33 localhost systemd[1]: Relabelled /dev, /dev/shm, /run, /sys/fs/cgroup in 19.298ms. Oct 12 13:33:33 localhost systemd[1]: systemd 252-47.el9 running in system mode (+PAM +AUDIT +SELINUX -APPARMOR +IMA +SMACK +SECCOMP +GCRYPT +GNUTLS +OPENSSL +ACL +BLKID +CURL +ELFUTILS +FIDO2 +IDN2 -IDN -IPTC +KMOD +LIBCRYPTSETUP +LIBFDISK +PCRE2 -PWQUALITY +P11KIT -QRENCODE +TPM2 +BZIP2 +LZ4 +XZ +ZLIB +ZSTD -BPF_FRAMEWORK +XKBCOMMON +UTMP +SYSVINIT default-hierarchy=unified) Oct 12 13:33:33 localhost systemd[1]: Detected virtualization xen. Oct 12 13:33:33 localhost systemd[1]: Detected architecture x86-64. Oct 12 13:33:33 localhost systemd[1]: Initializing machine ID from random generator. Oct 12 13:33:33 localhost systemd[1]: Installed transient /etc/machine-id file. Oct 12 13:33:33 localhost systemd-rc-local-generator[445]: /etc/rc.d/rc.local is not marked executable, skipping. Oct 12 13:33:33 localhost systemd[1]: initrd-switch-root.service: Deactivated successfully. Oct 12 13:33:33 localhost systemd[1]: Stopped Switch Root. Oct 12 13:33:33 localhost systemd[1]: systemd-journald.service: Scheduled restart job, restart counter is at 1. Oct 12 13:33:33 localhost systemd[1]: Created slice Slice /system/getty. Oct 12 13:33:33 localhost systemd[1]: Created slice Slice /system/modprobe. Oct 12 13:33:33 localhost systemd[1]: Created slice Slice /system/serial-getty. Oct 12 13:33:33 localhost systemd[1]: Created slice Slice /system/sshd-keygen. Oct 12 13:33:33 localhost systemd[1]: Created slice User and Session Slice. Oct 12 13:33:33 localhost systemd[1]: Started Dispatch Password Requests to Console Directory Watch. Oct 12 13:33:33 localhost systemd[1]: Started Forward Password Requests to Wall Directory Watch. Oct 12 13:33:33 localhost systemd[1]: Set up automount Arbitrary Executable File Formats File System Automount Point. 
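The records above show SELinux switching to enforcing mode (enforcing=1, old_enforcing=0 in the audit event) with the policy loaded in roughly 1.1 seconds. As a hedged sketch only, assuming the distribution-default targeted policy (the log does not name the policy), keeping a managed node in that state from a play could use the ansible.posix.selinux module, from the same ansible.posix collection this test already pulls in for its callbacks:

- name: Ensure SELinux is enforcing (illustrative; policy name assumed)
  ansible.posix.selinux:
    policy: targeted
    state: enforcing

Switching a host that previously ran permissive or disabled may require a relabel on the next boot; here the mode is set during early boot, before the root switch completes.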
Oct 12 13:33:33 localhost systemd[1]: Reached target Local Encrypted Volumes. Oct 12 13:33:33 localhost systemd[1]: Stopped target Switch Root. Oct 12 13:33:33 localhost systemd[1]: Stopped target Initrd File Systems. Oct 12 13:33:33 localhost systemd[1]: Stopped target Initrd Root File System. Oct 12 13:33:33 localhost systemd[1]: Reached target Local Integrity Protected Volumes. Oct 12 13:33:33 localhost systemd[1]: Reached target Path Units. Oct 12 13:33:33 localhost systemd[1]: Reached target Slice Units. Oct 12 13:33:33 localhost systemd[1]: Reached target Swaps. Oct 12 13:33:33 localhost systemd[1]: Reached target Local Verity Protected Volumes. Oct 12 13:33:33 localhost systemd[1]: Listening on RPCbind Server Activation Socket. Oct 12 13:33:33 localhost systemd[1]: Reached target RPC Port Mapper. Oct 12 13:33:33 localhost systemd[1]: Listening on Process Core Dump Socket. Oct 12 13:33:33 localhost systemd[1]: Listening on initctl Compatibility Named Pipe. Oct 12 13:33:33 localhost systemd[1]: Listening on udev Control Socket. Oct 12 13:33:33 localhost systemd[1]: Listening on udev Kernel Socket. Oct 12 13:33:33 localhost systemd[1]: Mounting Huge Pages File System... Oct 12 13:33:33 localhost systemd[1]: Mounting POSIX Message Queue File System... Oct 12 13:33:33 localhost systemd[1]: Mounting Kernel Debug File System... Oct 12 13:33:33 localhost systemd[1]: Mounting Kernel Trace File System... Oct 12 13:33:33 localhost systemd[1]: Kernel Module supporting RPCSEC_GSS was skipped because of an unmet condition check (ConditionPathExists=/etc/krb5.keytab). Oct 12 13:33:33 localhost systemd[1]: Starting Create List of Static Device Nodes... Oct 12 13:33:33 localhost systemd[1]: Starting Load Kernel Module configfs... Oct 12 13:33:33 localhost systemd[1]: Starting Load Kernel Module drm... Oct 12 13:33:33 localhost systemd[1]: Starting Load Kernel Module fuse... Oct 12 13:33:33 localhost systemd[1]: Starting Read and set NIS domainname from /etc/sysconfig/network... Oct 12 13:33:33 localhost systemd[1]: systemd-fsck-root.service: Deactivated successfully. Oct 12 13:33:33 localhost systemd[1]: Stopped File System Check on Root Device. Oct 12 13:33:33 localhost systemd[1]: Stopped Journal Service. Oct 12 13:33:33 localhost systemd[1]: Starting Journal Service... Oct 12 13:33:33 localhost systemd[1]: Load Kernel Modules was skipped because no trigger condition checks were met. Oct 12 13:33:33 localhost systemd[1]: Starting Generate network units from Kernel command line... Oct 12 13:33:33 localhost systemd[1]: TPM2 PCR Machine ID Measurement was skipped because of an unmet condition check (ConditionPathExists=/sys/firmware/efi/efivars/StubPcrKernelImage-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f). Oct 12 13:33:33 localhost systemd[1]: Starting Remount Root and Kernel File Systems... Oct 12 13:33:33 localhost systemd[1]: Repartition Root Disk was skipped because no trigger condition checks were met. Oct 12 13:33:33 localhost systemd[1]: Starting Apply Kernel Variables... Oct 12 13:33:33 localhost systemd[1]: Starting Coldplug All udev Devices... Oct 12 13:33:33 localhost systemd[1]: Mounted Huge Pages File System. Oct 12 13:33:33 localhost systemd[1]: Mounted POSIX Message Queue File System. Oct 12 13:33:33 localhost systemd[1]: Mounted Kernel Debug File System. Oct 12 13:33:33 localhost systemd[1]: Mounted Kernel Trace File System. Oct 12 13:33:33 localhost systemd[1]: Finished Read and set NIS domainname from /etc/sysconfig/network. 
Oct 12 13:33:33 localhost systemd[1]: Finished Remount Root and Kernel File Systems. Oct 12 13:33:33 localhost systemd[1]: First Boot Wizard was skipped because of an unmet condition check (ConditionFirstBoot=yes). Oct 12 13:33:33 localhost systemd-journald[479]: Journal started ░░ Subject: The journal has been started ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The system journal process has started up, opened the journal ░░ files for writing and is now ready to process requests. Oct 12 13:33:33 localhost systemd-journald[479]: Runtime Journal (/run/log/journal/9cbc1bf5e5cd4ec4b4004c5b0c25e2a0) is 8.0M, max 70.6M, 62.6M free. ░░ Subject: Disk space used by the journal ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ Runtime Journal (/run/log/journal/9cbc1bf5e5cd4ec4b4004c5b0c25e2a0) is currently using 8.0M. ░░ Maximum allowed usage is set to 70.6M. ░░ Leaving at least 35.3M free (of currently available 690.3M of disk space). ░░ Enforced usage limit is thus 70.6M, of which 62.6M are still available. ░░ ░░ The limits controlling how much disk space is used by the journal may ░░ be configured with SystemMaxUse=, SystemKeepFree=, SystemMaxFileSize=, ░░ RuntimeMaxUse=, RuntimeKeepFree=, RuntimeMaxFileSize= settings in ░░ /etc/systemd/journald.conf. See journald.conf(5) for details. Oct 12 13:33:33 localhost systemd[1]: Queued start job for default target Multi-User System. Oct 12 13:33:33 localhost systemd[1]: systemd-journald.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-journald.service has successfully entered the 'dead' state. Oct 12 13:33:33 localhost systemd[1]: Rebuild Hardware Database was skipped because of an unmet condition check (ConditionNeedsUpdate=/etc). Oct 12 13:33:33 localhost systemd[1]: Starting Load/Save OS Random Seed... Oct 12 13:33:33 localhost systemd[1]: Create System Users was skipped because no trigger condition checks were met. Oct 12 13:33:33 localhost systemd[1]: Started Journal Service. Oct 12 13:33:33 localhost systemd[1]: Finished Generate network units from Kernel command line. ░░ Subject: A start job for unit systemd-network-generator.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-network-generator.service has finished successfully. ░░ ░░ The job identifier is 154. Oct 12 13:33:33 localhost systemd[1]: Starting Flush Journal to Persistent Storage... ░░ Subject: A start job for unit systemd-journal-flush.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-journal-flush.service has begun execution. ░░ ░░ The job identifier is 133. Oct 12 13:33:33 localhost systemd[1]: Finished Create List of Static Device Nodes. ░░ Subject: A start job for unit kmod-static-nodes.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit kmod-static-nodes.service has finished successfully. ░░ ░░ The job identifier is 120. Oct 12 13:33:33 localhost systemd-journald[479]: Runtime Journal (/run/log/journal/9cbc1bf5e5cd4ec4b4004c5b0c25e2a0) is 8.0M, max 70.6M, 62.6M free. ░░ Subject: Disk space used by the journal ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ Runtime Journal (/run/log/journal/9cbc1bf5e5cd4ec4b4004c5b0c25e2a0) is currently using 8.0M. 
░░ Maximum allowed usage is set to 70.6M. ░░ Leaving at least 35.3M free (of currently available 690.3M of disk space). ░░ Enforced usage limit is thus 70.6M, of which 62.6M are still available. ░░ ░░ The limits controlling how much disk space is used by the journal may ░░ be configured with SystemMaxUse=, SystemKeepFree=, SystemMaxFileSize=, ░░ RuntimeMaxUse=, RuntimeKeepFree=, RuntimeMaxFileSize= settings in ░░ /etc/systemd/journald.conf. See journald.conf(5) for details. Oct 12 13:33:33 localhost systemd-journald[479]: Received client request to flush runtime journal. Oct 12 13:33:33 localhost systemd[1]: Starting Create Static Device Nodes in /dev... ░░ Subject: A start job for unit systemd-tmpfiles-setup-dev.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup-dev.service has begun execution. ░░ ░░ The job identifier is 147. Oct 12 13:33:33 localhost systemd[1]: Finished Flush Journal to Persistent Storage. ░░ Subject: A start job for unit systemd-journal-flush.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-journal-flush.service has finished successfully. ░░ ░░ The job identifier is 133. Oct 12 13:33:33 localhost systemd[1]: Finished Apply Kernel Variables. ░░ Subject: A start job for unit systemd-sysctl.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-sysctl.service has finished successfully. ░░ ░░ The job identifier is 124. Oct 12 13:33:33 localhost systemd[1]: Finished Load/Save OS Random Seed. ░░ Subject: A start job for unit systemd-random-seed.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-random-seed.service has finished successfully. ░░ ░░ The job identifier is 136. Oct 12 13:33:33 localhost systemd[1]: First Boot Complete was skipped because of an unmet condition check (ConditionFirstBoot=yes). ░░ Subject: A start job for unit first-boot-complete.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit first-boot-complete.target has finished successfully. ░░ ░░ The job identifier is 137. Oct 12 13:33:33 localhost systemd[1]: Finished Coldplug All udev Devices. ░░ Subject: A start job for unit systemd-udev-trigger.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-udev-trigger.service has finished successfully. ░░ ░░ The job identifier is 163. Oct 12 13:33:34 localhost systemd[1]: Finished Create Static Device Nodes in /dev. ░░ Subject: A start job for unit systemd-tmpfiles-setup-dev.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup-dev.service has finished successfully. ░░ ░░ The job identifier is 147. Oct 12 13:33:34 localhost systemd[1]: Reached target Preparation for Local File Systems. ░░ Subject: A start job for unit local-fs-pre.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit local-fs-pre.target has finished successfully. ░░ ░░ The job identifier is 152. Oct 12 13:33:34 localhost systemd[1]: Reached target Local File Systems. 
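Both journald status blocks above spell out the knobs that bound journal disk usage: SystemMaxUse=, RuntimeMaxUse=, and the related settings documented in journald.conf(5). A minimal tasks-file sketch, assuming the standard journald.conf.d drop-in convention and using placeholder sizes (64M and 128M are illustrative, not values from this run):

- name: Create the journald drop-in directory (illustrative)
  ansible.builtin.file:
    path: /etc/systemd/journald.conf.d
    state: directory
    mode: "0755"

- name: Cap runtime journal usage (placeholder values)
  ansible.builtin.copy:
    dest: /etc/systemd/journald.conf.d/50-limits.conf
    content: |
      # The runtime journal lives under /run/log/journal until flushed
      [Journal]
      RuntimeMaxUse=64M
      RuntimeKeepFree=128M
    mode: "0644"

- name: Restart journald so the new limits take effect
  ansible.builtin.systemd:
    name: systemd-journald
    state: restarted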
░░ Subject: A start job for unit local-fs.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit local-fs.target has finished successfully. ░░ ░░ The job identifier is 150. Oct 12 13:33:34 localhost systemd[1]: Rebuild Dynamic Linker Cache was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit ldconfig.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit ldconfig.service has finished successfully. ░░ ░░ The job identifier is 135. Oct 12 13:33:34 localhost systemd[1]: Mark the need to relabel after reboot was skipped because of an unmet condition check (ConditionSecurity=!selinux). ░░ Subject: A start job for unit selinux-autorelabel-mark.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit selinux-autorelabel-mark.service has finished successfully. ░░ ░░ The job identifier is 161. Oct 12 13:33:34 localhost systemd[1]: Set Up Additional Binary Formats was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit systemd-binfmt.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-binfmt.service has finished successfully. ░░ ░░ The job identifier is 177. Oct 12 13:33:34 localhost systemd[1]: Update Boot Loader Random Seed was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit systemd-boot-random-seed.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-boot-random-seed.service has finished successfully. ░░ ░░ The job identifier is 153. Oct 12 13:33:34 localhost systemd[1]: Starting Automatic Boot Loader Update... ░░ Subject: A start job for unit systemd-boot-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-boot-update.service has begun execution. ░░ ░░ The job identifier is 148. Oct 12 13:33:34 localhost systemd[1]: Starting Commit a transient machine-id on disk... ░░ Subject: A start job for unit systemd-machine-id-commit.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-machine-id-commit.service has begun execution. ░░ ░░ The job identifier is 168. Oct 12 13:33:34 localhost systemd[1]: Starting Create Volatile Files and Directories... ░░ Subject: A start job for unit systemd-tmpfiles-setup.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup.service has begun execution. ░░ ░░ The job identifier is 145. Oct 12 13:33:34 localhost systemd[1]: Starting Rule-based Manager for Device Events and Files... ░░ Subject: A start job for unit systemd-udevd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-udevd.service has begun execution. ░░ ░░ The job identifier is 164. Oct 12 13:33:34 localhost bootctl[490]: Couldn't find EFI system partition, skipping. Oct 12 13:33:34 localhost systemd[1]: Finished Automatic Boot Loader Update. 
░░ Subject: A start job for unit systemd-boot-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-boot-update.service has finished successfully. ░░ ░░ The job identifier is 148. Oct 12 13:33:34 localhost systemd-udevd[493]: Using default interface naming scheme 'rhel-9.0'. Oct 12 13:33:34 localhost systemd[1]: modprobe@configfs.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit modprobe@configfs.service has successfully entered the 'dead' state. Oct 12 13:33:34 localhost systemd[1]: Finished Load Kernel Module configfs. ░░ Subject: A start job for unit modprobe@configfs.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@configfs.service has finished successfully. ░░ ░░ The job identifier is 171. Oct 12 13:33:34 localhost systemd[1]: Mounting Kernel Configuration File System... ░░ Subject: A start job for unit sys-kernel-config.mount has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sys-kernel-config.mount has begun execution. ░░ ░░ The job identifier is 170. Oct 12 13:33:34 localhost systemd[1]: Mounted Kernel Configuration File System. ░░ Subject: A start job for unit sys-kernel-config.mount has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sys-kernel-config.mount has finished successfully. ░░ ░░ The job identifier is 170. Oct 12 13:33:34 localhost kernel: fuse: init (API version 7.36) Oct 12 13:33:34 localhost systemd[1]: modprobe@fuse.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit modprobe@fuse.service has successfully entered the 'dead' state. Oct 12 13:33:34 localhost systemd[1]: Finished Load Kernel Module fuse. ░░ Subject: A start job for unit modprobe@fuse.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@fuse.service has finished successfully. ░░ ░░ The job identifier is 176. Oct 12 13:33:34 localhost systemd[1]: Finished Commit a transient machine-id on disk. ░░ Subject: A start job for unit systemd-machine-id-commit.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-machine-id-commit.service has finished successfully. ░░ ░░ The job identifier is 168. Oct 12 13:33:34 localhost systemd[1]: Finished Create Volatile Files and Directories. ░░ Subject: A start job for unit systemd-tmpfiles-setup.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-setup.service has finished successfully. ░░ ░░ The job identifier is 145. Oct 12 13:33:34 localhost systemd[1]: Starting Security Auditing Service... ░░ Subject: A start job for unit auditd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit auditd.service has begun execution. ░░ ░░ The job identifier is 227. Oct 12 13:33:34 localhost systemd[1]: Starting RPC Bind... 
░░ Subject: A start job for unit rpcbind.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpcbind.service has begun execution. ░░ ░░ The job identifier is 199. Oct 12 13:33:34 localhost systemd[1]: Rebuild Journal Catalog was skipped because of an unmet condition check (ConditionNeedsUpdate=/var). ░░ Subject: A start job for unit systemd-journal-catalog-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-journal-catalog-update.service has finished successfully. ░░ ░░ The job identifier is 127. Oct 12 13:33:34 localhost systemd[1]: Update is Completed was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit systemd-update-done.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-done.service has finished successfully. ░░ ░░ The job identifier is 158. Oct 12 13:33:34 localhost kernel: ACPI: bus type drm_connector registered Oct 12 13:33:34 localhost systemd[1]: modprobe@drm.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit modprobe@drm.service has successfully entered the 'dead' state. Oct 12 13:33:34 localhost systemd[1]: Finished Load Kernel Module drm. ░░ Subject: A start job for unit modprobe@drm.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@drm.service has finished successfully. ░░ ░░ The job identifier is 211. Oct 12 13:33:34 localhost systemd[1]: etc-machine\x2did.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit etc-machine\x2did.mount has successfully entered the 'dead' state. Oct 12 13:33:34 localhost systemd[1]: Mounting FUSE Control File System... ░░ Subject: A start job for unit sys-fs-fuse-connections.mount has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sys-fs-fuse-connections.mount has begun execution. ░░ ░░ The job identifier is 175. Oct 12 13:33:34 localhost systemd[1]: Mounting RPC Pipe File System... ░░ Subject: A start job for unit var-lib-nfs-rpc_pipefs.mount has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit var-lib-nfs-rpc_pipefs.mount has begun execution. ░░ ░░ The job identifier is 206. Oct 12 13:33:34 localhost systemd[1]: Mounted FUSE Control File System. ░░ Subject: A start job for unit sys-fs-fuse-connections.mount has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sys-fs-fuse-connections.mount has finished successfully. ░░ ░░ The job identifier is 175. Oct 12 13:33:34 localhost kernel: RPC: Registered named UNIX socket transport module. Oct 12 13:33:34 localhost kernel: RPC: Registered udp transport module. Oct 12 13:33:34 localhost kernel: RPC: Registered tcp transport module. Oct 12 13:33:34 localhost kernel: RPC: Registered tcp-with-tls transport module. Oct 12 13:33:34 localhost kernel: RPC: Registered tcp NFSv4.1 backchannel transport module. Oct 12 13:33:34 localhost systemd[1]: Mounted RPC Pipe File System. 
░░ Subject: A start job for unit var-lib-nfs-rpc_pipefs.mount has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit var-lib-nfs-rpc_pipefs.mount has finished successfully. ░░ ░░ The job identifier is 206. Oct 12 13:33:34 localhost systemd[1]: Reached target rpc_pipefs.target. ░░ Subject: A start job for unit rpc_pipefs.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpc_pipefs.target has finished successfully. ░░ ░░ The job identifier is 205. Oct 12 13:33:34 localhost systemd[1]: Started Rule-based Manager for Device Events and Files. ░░ Subject: A start job for unit systemd-udevd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-udevd.service has finished successfully. ░░ ░░ The job identifier is 164. Oct 12 13:33:34 localhost systemd[1]: Starting Load Kernel Module configfs... ░░ Subject: A start job for unit modprobe@configfs.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@configfs.service has begun execution. ░░ ░░ The job identifier is 262. Oct 12 13:33:34 localhost systemd[1]: modprobe@configfs.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit modprobe@configfs.service has successfully entered the 'dead' state. Oct 12 13:33:34 localhost systemd[1]: Finished Load Kernel Module configfs. ░░ Subject: A start job for unit modprobe@configfs.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit modprobe@configfs.service has finished successfully. ░░ ░░ The job identifier is 262. Oct 12 13:33:34 localhost systemd[1]: Condition check resulted in /dev/ttyS0 being skipped. ░░ Subject: A start job for unit dev-ttyS0.device has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dev-ttyS0.device has finished successfully. ░░ ░░ The job identifier is 244. Oct 12 13:33:35 localhost systemd[1]: Started RPC Bind. ░░ Subject: A start job for unit rpcbind.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rpcbind.service has finished successfully. ░░ ░░ The job identifier is 199. 
Oct 12 13:33:35 localhost kernel: input: PC Speaker as /devices/platform/pcspkr/input/input5 Oct 12 13:33:35 localhost auditd[533]: No plugins found, not dispatching events Oct 12 13:33:35 localhost auditd[533]: Init complete, auditd 3.1.5 listening for events (startup state enable) Oct 12 13:33:35 localhost kernel: RAPL PMU: API unit is 2^-32 Joules, 0 fixed counters, 655360 ms ovfl timer Oct 12 13:33:35 localhost kernel: cirrus 0000:00:02.0: vgaarb: deactivate vga console Oct 12 13:33:35 localhost kernel: Console: switching to colour dummy device 80x25 Oct 12 13:33:35 localhost kernel: [drm] Initialized cirrus 2.0.0 2019 for 0000:00:02.0 on minor 0 Oct 12 13:33:35 localhost kernel: fbcon: cirrusdrmfb (fb0) is primary device Oct 12 13:33:35 localhost kernel: Console: switching to colour frame buffer device 128x48 Oct 12 13:33:35 localhost kernel: cirrus 0000:00:02.0: [drm] fb0: cirrusdrmfb frame buffer device Oct 12 13:33:35 localhost systemd-udevd[519]: Network interface NamePolicy= disabled on kernel command line. Oct 12 13:33:35 localhost kernel: piix4_smbus 0000:00:01.3: SMBus base address uninitialized - upgrade BIOS or use force_addr=0xaddr Oct 12 13:33:35 localhost augenrules[536]: /sbin/augenrules: No change Oct 12 13:33:35 localhost augenrules[564]: No rules Oct 12 13:33:35 localhost augenrules[564]: enabled 1 Oct 12 13:33:35 localhost augenrules[564]: failure 1 Oct 12 13:33:35 localhost augenrules[564]: pid 533 Oct 12 13:33:35 localhost augenrules[564]: rate_limit 0 Oct 12 13:33:35 localhost augenrules[564]: backlog_limit 8192 Oct 12 13:33:35 localhost augenrules[564]: lost 0 Oct 12 13:33:35 localhost augenrules[564]: backlog 2 Oct 12 13:33:35 localhost augenrules[564]: backlog_wait_time 60000 Oct 12 13:33:35 localhost augenrules[564]: backlog_wait_time_actual 0 Oct 12 13:33:35 localhost augenrules[564]: enabled 1 Oct 12 13:33:35 localhost augenrules[564]: failure 1 Oct 12 13:33:35 localhost augenrules[564]: pid 533 Oct 12 13:33:35 localhost augenrules[564]: rate_limit 0 Oct 12 13:33:35 localhost augenrules[564]: backlog_limit 8192 Oct 12 13:33:35 localhost augenrules[564]: lost 0 Oct 12 13:33:35 localhost augenrules[564]: backlog 0 Oct 12 13:33:35 localhost augenrules[564]: backlog_wait_time 60000 Oct 12 13:33:35 localhost augenrules[564]: backlog_wait_time_actual 0 Oct 12 13:33:35 localhost augenrules[564]: enabled 1 Oct 12 13:33:35 localhost augenrules[564]: failure 1 Oct 12 13:33:35 localhost augenrules[564]: pid 533 Oct 12 13:33:35 localhost augenrules[564]: rate_limit 0 Oct 12 13:33:35 localhost augenrules[564]: backlog_limit 8192 Oct 12 13:33:35 localhost augenrules[564]: lost 0 Oct 12 13:33:35 localhost augenrules[564]: backlog 0 Oct 12 13:33:35 localhost augenrules[564]: backlog_wait_time 60000 Oct 12 13:33:35 localhost augenrules[564]: backlog_wait_time_actual 0 Oct 12 13:33:35 localhost systemd[1]: Started Security Auditing Service. ░░ Subject: A start job for unit auditd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit auditd.service has finished successfully. ░░ ░░ The job identifier is 227. Oct 12 13:33:35 localhost systemd[1]: Starting Record System Boot/Shutdown in UTMP... ░░ Subject: A start job for unit systemd-update-utmp.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-utmp.service has begun execution. ░░ ░░ The job identifier is 229. 
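The augenrules lines above are a status dump of the kernel audit settings after rule load: backlog_limit 8192, failure flag 1, rate_limit 0, and a 60000 ms backlog wait, printed repeatedly as the tool reports state around applying changes. Those values correspond to auditctl-style directives (-b, -f, -r, --backlog_wait_time) in the /etc/audit/rules.d fragments that augenrules compiles into /etc/audit/audit.rules. A hedged sketch of managing such a fragment, with an assumed file name and the parameter values simply echoed from this dump:

- name: Install baseline kernel audit settings (illustrative file name)
  ansible.builtin.copy:
    dest: /etc/audit/rules.d/10-base.rules
    content: |
      # Clear loaded rules, then set queue and failure-mode parameters
      -D
      -b 8192
      -f 1
      --backlog_wait_time 60000
    mode: "0640"

- name: Compile rules.d fragments and load them into the kernel
  ansible.builtin.command:
    cmd: augenrules --load
  changed_when: true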
Oct 12 13:33:35 localhost systemd[1]: Finished Record System Boot/Shutdown in UTMP. ░░ Subject: A start job for unit systemd-update-utmp.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-update-utmp.service has finished successfully. ░░ ░░ The job identifier is 229. Oct 12 13:33:35 localhost systemd[1]: Reached target System Initialization. ░░ Subject: A start job for unit sysinit.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sysinit.target has finished successfully. ░░ ░░ The job identifier is 119. Oct 12 13:33:35 localhost systemd[1]: Started dnf makecache --timer. ░░ Subject: A start job for unit dnf-makecache.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dnf-makecache.timer has finished successfully. ░░ ░░ The job identifier is 188. Oct 12 13:33:35 localhost systemd[1]: Started Daily rotation of log files. ░░ Subject: A start job for unit logrotate.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.timer has finished successfully. ░░ ░░ The job identifier is 194. Oct 12 13:33:35 localhost systemd[1]: Started Daily Cleanup of Temporary Directories. ░░ Subject: A start job for unit systemd-tmpfiles-clean.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-tmpfiles-clean.timer has finished successfully. ░░ ░░ The job identifier is 195. Oct 12 13:33:35 localhost systemd[1]: Reached target Timer Units. ░░ Subject: A start job for unit timers.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit timers.target has finished successfully. ░░ ░░ The job identifier is 187. Oct 12 13:33:35 localhost systemd[1]: Listening on D-Bus System Message Bus Socket. ░░ Subject: A start job for unit dbus.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dbus.socket has finished successfully. ░░ ░░ The job identifier is 182. Oct 12 13:33:35 localhost systemd[1]: Listening on SSSD Kerberos Cache Manager responder socket. ░░ Subject: A start job for unit sssd-kcm.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sssd-kcm.socket has finished successfully. ░░ ░░ The job identifier is 184. Oct 12 13:33:35 localhost systemd[1]: Reached target Socket Units. ░░ Subject: A start job for unit sockets.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sockets.target has finished successfully. ░░ ░░ The job identifier is 179. Oct 12 13:33:35 localhost systemd[1]: Starting D-Bus System Message Bus... ░░ Subject: A start job for unit dbus-broker.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dbus-broker.service has begun execution. ░░ ░░ The job identifier is 192. Oct 12 13:33:35 localhost systemd[1]: TPM2 PCR Barrier (Initialization) was skipped because of an unmet condition check (ConditionPathExists=/sys/firmware/efi/efivars/StubPcrKernelImage-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f). 
░░ Subject: A start job for unit systemd-pcrphase-sysinit.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-pcrphase-sysinit.service has finished successfully. ░░ ░░ The job identifier is 122. Oct 12 13:33:36 localhost systemd[1]: Started D-Bus System Message Bus. ░░ Subject: A start job for unit dbus-broker.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dbus-broker.service has finished successfully. ░░ ░░ The job identifier is 192. Oct 12 13:33:36 localhost systemd[1]: Reached target Basic System. ░░ Subject: A start job for unit basic.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit basic.target has finished successfully. ░░ ░░ The job identifier is 116. Oct 12 13:33:36 localhost dbus-broker-lau[577]: Ready Oct 12 13:33:36 localhost systemd[1]: Starting NTP client/server... ░░ Subject: A start job for unit chronyd.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit chronyd.service has begun execution. ░░ ░░ The job identifier is 235. Oct 12 13:33:36 localhost systemd[1]: Starting Initial cloud-init job (pre-networking)... ░░ Subject: A start job for unit cloud-init-local.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init-local.service has begun execution. ░░ ░░ The job identifier is 217. Oct 12 13:33:36 localhost systemd[1]: Starting Restore /run/initramfs on shutdown... ░░ Subject: A start job for unit dracut-shutdown.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-shutdown.service has begun execution. ░░ ░░ The job identifier is 169. Oct 12 13:33:36 localhost systemd[1]: Started irqbalance daemon. ░░ Subject: A start job for unit irqbalance.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit irqbalance.service has finished successfully. ░░ ░░ The job identifier is 247. Oct 12 13:33:36 localhost systemd[1]: Load CPU microcode update was skipped because of an unmet condition check (ConditionPathExists=/sys/devices/system/cpu/microcode/reload). ░░ Subject: A start job for unit microcode.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit microcode.service has finished successfully. ░░ ░░ The job identifier is 196. Oct 12 13:33:36 localhost systemd[1]: Started Hardware RNG Entropy Gatherer Daemon. ░░ Subject: A start job for unit rngd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit rngd.service has finished successfully. ░░ ░░ The job identifier is 232. Oct 12 13:33:36 localhost systemd[1]: OpenSSH ecdsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@ecdsa.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@ecdsa.service has finished successfully. ░░ ░░ The job identifier is 220. 
Oct 12 13:33:36 localhost systemd[1]: OpenSSH ed25519 Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@ed25519.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@ed25519.service has finished successfully. ░░ ░░ The job identifier is 223. Oct 12 13:33:36 localhost systemd[1]: OpenSSH rsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target). ░░ Subject: A start job for unit sshd-keygen@rsa.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen@rsa.service has finished successfully. ░░ ░░ The job identifier is 222. Oct 12 13:33:36 localhost systemd[1]: Reached target sshd-keygen.target. ░░ Subject: A start job for unit sshd-keygen.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sshd-keygen.target has finished successfully. ░░ ░░ The job identifier is 219. Oct 12 13:33:36 localhost systemd[1]: System Security Services Daemon was skipped because no trigger condition checks were met. ░░ Subject: A start job for unit sssd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit sssd.service has finished successfully. ░░ ░░ The job identifier is 233. Oct 12 13:33:36 localhost systemd[1]: Reached target User and Group Name Lookups. ░░ Subject: A start job for unit nss-user-lookup.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit nss-user-lookup.target has finished successfully. ░░ ░░ The job identifier is 234. Oct 12 13:33:36 localhost systemd[1]: Starting User Login Management... ░░ Subject: A start job for unit systemd-logind.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-logind.service has begun execution. ░░ ░░ The job identifier is 210. Oct 12 13:33:36 localhost systemd[1]: Starting Rotate log files... ░░ Subject: A start job for unit logrotate.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.service has begun execution. ░░ ░░ The job identifier is 266. Oct 12 13:33:36 localhost systemd[1]: Finished Restore /run/initramfs on shutdown. ░░ Subject: A start job for unit dracut-shutdown.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit dracut-shutdown.service has finished successfully. ░░ ░░ The job identifier is 169. Oct 12 13:33:36 localhost /usr/sbin/irqbalance[582]: libcap-ng used by "/usr/sbin/irqbalance" failed dropping bounding set due to not having CAP_SETPCAP in capng_apply Oct 12 13:33:36 localhost systemd-logind[584]: New seat seat0. ░░ Subject: A new seat seat0 is now available ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new seat seat0 has been configured and is now available. 
Oct 12 13:33:36 localhost systemd-logind[584]: Watching system buttons on /dev/input/event0 (Power Button) Oct 12 13:33:36 localhost systemd-logind[584]: Watching system buttons on /dev/input/event1 (Sleep Button) Oct 12 13:33:36 localhost systemd-logind[584]: Watching system buttons on /dev/input/event2 (AT Translated Set 2 keyboard) Oct 12 13:33:36 localhost systemd[1]: Started User Login Management. ░░ Subject: A start job for unit systemd-logind.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-logind.service has finished successfully. ░░ ░░ The job identifier is 210. Oct 12 13:33:37 localhost systemd[1]: logrotate.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit logrotate.service has successfully entered the 'dead' state. Oct 12 13:33:37 localhost systemd[1]: Finished Rotate log files. ░░ Subject: A start job for unit logrotate.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.service has finished successfully. ░░ ░░ The job identifier is 266. Oct 12 13:33:37 localhost chronyd[591]: chronyd version 4.6 starting (+CMDMON +NTP +REFCLOCK +RTC +PRIVDROP +SCFILTER +SIGND +ASYNCDNS +NTS +SECHASH +IPV6 +DEBUG) Oct 12 13:33:37 localhost chronyd[591]: Loaded 0 symmetric keys Oct 12 13:33:37 localhost rngd[583]: Disabling 7: PKCS11 Entropy generator (pkcs11) Oct 12 13:33:37 localhost rngd[583]: Disabling 5: NIST Network Entropy Beacon (nist) Oct 12 13:33:37 localhost rngd[583]: Disabling 9: Qrypt quantum entropy beacon (qrypt) Oct 12 13:33:37 localhost rngd[583]: Initializing available sources Oct 12 13:33:37 localhost rngd[583]: [hwrng ]: Initialization Failed Oct 12 13:33:37 localhost rngd[583]: [rdrand]: Enabling RDRAND rng support Oct 12 13:33:37 localhost rngd[583]: [rdrand]: Initialized Oct 12 13:33:37 localhost rngd[583]: [jitter]: JITTER timeout set to 5 sec Oct 12 13:33:37 localhost rngd[583]: [jitter]: Initializing AES buffer Oct 12 13:33:37 localhost chronyd[591]: Using right/UTC timezone to obtain leap second data Oct 12 13:33:37 localhost chronyd[591]: Frequency 0.000 +/- 1000000.000 ppm read from /var/lib/chrony/drift Oct 12 13:33:37 localhost chronyd[591]: Loaded seccomp filter (level 2) Oct 12 13:33:37 localhost systemd[1]: Started NTP client/server. ░░ Subject: A start job for unit chronyd.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit chronyd.service has finished successfully. ░░ ░░ The job identifier is 235. Oct 12 13:33:42 localhost rngd[583]: [jitter]: Unable to obtain AES key, disabling JITTER source Oct 12 13:33:42 localhost rngd[583]: [jitter]: Initialization Failed Oct 12 13:33:42 localhost rngd[583]: [namedpipe]: Initialization Failed Oct 12 13:33:42 localhost rngd[583]: Process privileges have been dropped to 2:2 Oct 12 13:33:43 localhost cloud-init[597]: Cloud-init v. 23.4-19.el9 running 'init-local' at Sat, 12 Oct 2024 17:33:43 +0000. Up 28.45 seconds. Oct 12 13:33:44 localhost dhclient[600]: Internet Systems Consortium DHCP Client 4.4.2b1 Oct 12 13:33:44 localhost dhclient[600]: Copyright 2004-2019 Internet Systems Consortium. Oct 12 13:33:44 localhost dhclient[600]: All rights reserved. 
Oct 12 13:33:44 localhost dhclient[600]: For info, please visit https://www.isc.org/software/dhcp/ Oct 12 13:33:44 localhost dhclient[600]: Oct 12 13:33:44 localhost dhclient[600]: Listening on LPF/eth0/02:64:6e:af:52:13 Oct 12 13:33:44 localhost dhclient[600]: Sending on LPF/eth0/02:64:6e:af:52:13 Oct 12 13:33:44 localhost dhclient[600]: Sending on Socket/fallback Oct 12 13:33:44 localhost dhclient[600]: DHCPDISCOVER on eth0 to 255.255.255.255 port 67 interval 3 (xid=0x537d615b) Oct 12 13:33:44 localhost dhclient[600]: DHCPOFFER of 10.31.44.126 from 10.31.44.1 Oct 12 13:33:44 localhost dhclient[600]: DHCPREQUEST for 10.31.44.126 on eth0 to 255.255.255.255 port 67 (xid=0x537d615b) Oct 12 13:33:44 localhost dhclient[600]: DHCPACK of 10.31.44.126 from 10.31.44.1 (xid=0x537d615b) Oct 12 13:33:44 localhost dhclient[600]: bound to 10.31.44.126 -- renewal in 1558 seconds. Oct 12 13:33:44 localhost systemd[1]: Starting Hostname Service... ░░ Subject: A start job for unit systemd-hostnamed.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has begun execution. ░░ ░░ The job identifier is 329. Oct 12 13:33:44 localhost systemd[1]: Started Hostname Service. ░░ Subject: A start job for unit systemd-hostnamed.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has finished successfully. ░░ ░░ The job identifier is 329. Oct 12 13:33:44 ip-10-31-44-126.us-east-1.aws.redhat.com systemd-hostnamed[615]: Hostname set to <ip-10-31-44-126.us-east-1.aws.redhat.com> (static) Oct 12 13:33:44 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Finished Initial cloud-init job (pre-networking). ░░ Subject: A start job for unit cloud-init-local.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit cloud-init-local.service has finished successfully. ░░ ░░ The job identifier is 217. Oct 12 13:33:44 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Reached target Preparation for Network. ░░ Subject: A start job for unit network-pre.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit network-pre.target has finished successfully. ░░ ░░ The job identifier is 155. Oct 12 13:33:44 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Starting Network Manager... ░░ Subject: A start job for unit NetworkManager.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager.service has begun execution. ░░ ░░ The job identifier is 191. Oct 12 13:33:45 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754425.5148] NetworkManager (version 1.51.0-1.el9) is starting... (boot:e283f41f-f8ba-4d56-97ed-896e012d08e4) Oct 12 13:33:45 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754425.5150] Read config: /etc/NetworkManager/NetworkManager.conf (run: 15-carrier-timeout.conf) Oct 12 13:33:45 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754425.7320] manager[0x56176e826080]: monitoring kernel firmware directory '/lib/firmware'.
Oct 12 13:33:45 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754425.7343] hostname: hostname: using hostnamed Oct 12 13:33:45 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754425.7343] hostname: static hostname changed from (none) to "ip-10-31-44-126.us-east-1.aws.redhat.com" Oct 12 13:33:45 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754425.7383] dns-mgr: init: dns=default,systemd-resolved rc-manager=symlink (auto) Oct 12 13:33:45 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754425.8414] manager[0x56176e826080]: rfkill: Wi-Fi hardware radio set enabled Oct 12 13:33:45 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754425.8414] manager[0x56176e826080]: rfkill: WWAN hardware radio set enabled Oct 12 13:33:45 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754425.8497] Loaded device plugin: NMTeamFactory (/usr/lib64/NetworkManager/1.51.0-1.el9/libnm-device-plugin-team.so) Oct 12 13:33:45 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754425.8498] manager: rfkill: Wi-Fi enabled by radio killswitch; enabled by state file Oct 12 13:33:45 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754425.8503] manager: rfkill: WWAN enabled by radio killswitch; enabled by state file Oct 12 13:33:45 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754425.8504] manager: Networking is enabled by state file Oct 12 13:33:45 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754425.8544] settings: Loaded settings plugin: keyfile (internal) Oct 12 13:33:45 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Starting Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 394. Oct 12 13:33:45 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Listening on Load/Save RF Kill Switch Status /dev/rfkill Watch. ░░ Subject: A start job for unit systemd-rfkill.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-rfkill.socket has finished successfully. ░░ ░░ The job identifier is 458. 
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754426.0116] settings: Loaded settings plugin: ifcfg-rh ("/usr/lib64/NetworkManager/1.51.0-1.el9/libnm-settings-plugin-ifcfg-rh.so") Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754426.0789] Warning: the ifcfg-rh plugin is deprecated, please migrate connections to the keyfile format using "nmcli connection migrate" Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754426.0797] dhcp: init: Using DHCP client 'internal' Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754426.0800] manager: (lo): new Loopback device (/org/freedesktop/NetworkManager/Devices/1) Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754426.0810] device (lo): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754426.0815] device (lo): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754426.0821] device (lo): Activation: starting connection 'lo' (27c0cb9f-d60f-44af-950d-9aa226d0adc3) Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754426.0828] manager: (eth0): new Ethernet device (/org/freedesktop/NetworkManager/Devices/2) Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754426.0832] device (eth0): state change: unmanaged -> unavailable (reason 'managed', managed-type: 'external') Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Started Network Manager. ░░ Subject: A start job for unit NetworkManager.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager.service has finished successfully. ░░ ░░ The job identifier is 191. Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754426.0860] bus-manager: acquired D-Bus service "org.freedesktop.NetworkManager" Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754426.0865] device (lo): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754426.0868] device (lo): state change: prepare -> config (reason 'none', managed-type: 'external') Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754426.0871] device (lo): state change: config -> ip-config (reason 'none', managed-type: 'external') Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Reached target Network. ░░ Subject: A start job for unit network.target has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit network.target has finished successfully. ░░ ░░ The job identifier is 193. 
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754426.0888] device (eth0): carrier: link connected
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754426.0892] device (lo): state change: ip-config -> ip-check (reason 'none', managed-type: 'external')
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754426.0898] device (eth0): state change: unavailable -> disconnected (reason 'carrier-changed', managed-type: 'full')
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754426.0904] policy: auto-activating connection 'System eth0' (5fb06bd0-0bb0-7ffb-45f1-d6edd65f3e03)
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754426.0909] device (eth0): Activation: starting connection 'System eth0' (5fb06bd0-0bb0-7ffb-45f1-d6edd65f3e03)
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754426.0911] device (eth0): state change: disconnected -> prepare (reason 'none', managed-type: 'full')
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754426.0914] manager: NetworkManager state is now CONNECTING
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754426.0916] device (eth0): state change: prepare -> config (reason 'none', managed-type: 'full')
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754426.0922] device (eth0): state change: config -> ip-config (reason 'none', managed-type: 'full')
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754426.0925] dhcp4 (eth0): activation: beginning transaction (timeout in 45 seconds)
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754426.0940] dhcp4 (eth0): state changed new lease, address=10.31.44.126
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754426.0957] policy: set 'System eth0' (eth0) as default for IPv4 routing and DNS
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Starting Network Manager Wait Online...
░░ Subject: A start job for unit NetworkManager-wait-online.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit NetworkManager-wait-online.service has begun execution.
░░
░░ The job identifier is 190.
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Starting GSSAPI Proxy Daemon...
░░ Subject: A start job for unit gssproxy.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit gssproxy.service has begun execution.
░░
░░ The job identifier is 208.
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Started Network Manager Script Dispatcher Service.
░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit NetworkManager-dispatcher.service has finished successfully.
░░
░░ The job identifier is 394.
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754426.1133] device (eth0): state change: ip-config -> ip-check (reason 'none', managed-type: 'full')
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754426.1394] device (lo): state change: ip-check -> secondaries (reason 'none', managed-type: 'external')
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754426.1401] device (eth0): state change: ip-check -> secondaries (reason 'none', managed-type: 'full')
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754426.1404] device (lo): state change: secondaries -> activated (reason 'none', managed-type: 'external')
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754426.1418] device (lo): Activation: successful, device activated.
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754426.1432] device (eth0): state change: secondaries -> activated (reason 'none', managed-type: 'full')
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754426.1452] manager: NetworkManager state is now CONNECTED_SITE
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754426.1459] device (eth0): Activation: successful, device activated.
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754426.1467] manager: NetworkManager state is now CONNECTED_GLOBAL
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com NetworkManager[619]: [1728754426.1476] manager: startup complete
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Finished Network Manager Wait Online.
░░ Subject: A start job for unit NetworkManager-wait-online.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit NetworkManager-wait-online.service has finished successfully.
░░
░░ The job identifier is 190.
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Starting Initial cloud-init job (metadata service crawler)...
░░ Subject: A start job for unit cloud-init.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit cloud-init.service has begun execution.
░░
░░ The job identifier is 216.
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com chronyd[591]: Added source 10.11.160.238
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com chronyd[591]: Added source 10.18.100.10
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com chronyd[591]: Added source 10.2.32.37
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com chronyd[591]: Added source 10.2.32.38
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: Cloud-init v. 23.4-19.el9 running 'init' at Sat, 12 Oct 2024 17:33:46 +0000. Up 31.43 seconds.
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: ci-info: ++++++++++++++++++++++++++++++++++++++Net device info++++++++++++++++++++++++++++++++++++++
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: ci-info: +--------+------+----------------------------+---------------+--------+-------------------+
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: ci-info: | Device | Up | Address | Mask | Scope | Hw-Address |
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: ci-info: +--------+------+----------------------------+---------------+--------+-------------------+
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: ci-info: | eth0 | True | 10.31.44.126 | 255.255.252.0 | global | 02:64:6e:af:52:13 |
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: ci-info: | eth0 | True | fe80::64:6eff:feaf:5213/64 | . | link | 02:64:6e:af:52:13 |
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: ci-info: | lo | True | 127.0.0.1 | 255.0.0.0 | host | . |
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: ci-info: | lo | True | ::1/128 | . | host | . |
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: ci-info: +--------+------+----------------------------+---------------+--------+-------------------+
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: ci-info: ++++++++++++++++++++++++++++Route IPv4 info+++++++++++++++++++++++++++++
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: ci-info: +-------+-------------+------------+---------------+-----------+-------+
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: ci-info: | Route | Destination | Gateway | Genmask | Interface | Flags |
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: ci-info: +-------+-------------+------------+---------------+-----------+-------+
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: ci-info: | 0 | 0.0.0.0 | 10.31.44.1 | 0.0.0.0 | eth0 | UG |
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: ci-info: | 1 | 10.31.44.0 | 0.0.0.0 | 255.255.252.0 | eth0 | U |
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: ci-info: +-------+-------------+------------+---------------+-----------+-------+
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: ci-info: +++++++++++++++++++Route IPv6 info+++++++++++++++++++
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: ci-info: +-------+-------------+---------+-----------+-------+
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: ci-info: | Route | Destination | Gateway | Interface | Flags |
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: ci-info: +-------+-------------+---------+-----------+-------+
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: ci-info: | 1 | fe80::/64 | :: | eth0 | U |
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: ci-info: | 3 | multicast | :: | eth0 | U |
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: ci-info: +-------+-------------+---------+-----------+-------+
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Started GSSAPI Proxy Daemon.
░░ Subject: A start job for unit gssproxy.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit gssproxy.service has finished successfully.
░░
░░ The job identifier is 208.
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: RPC security service for NFS client and server was skipped because of an unmet condition check (ConditionPathExists=/etc/krb5.keytab).
░░ Subject: A start job for unit rpc-gssd.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit rpc-gssd.service has finished successfully.
░░
░░ The job identifier is 204.
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Reached target NFS client services.
░░ Subject: A start job for unit nfs-client.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit nfs-client.target has finished successfully.
░░
░░ The job identifier is 200.
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Reached target Preparation for Remote File Systems.
░░ Subject: A start job for unit remote-fs-pre.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit remote-fs-pre.target has finished successfully.
░░
░░ The job identifier is 201.
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Reached target Remote File Systems.
░░ Subject: A start job for unit remote-fs.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit remote-fs.target has finished successfully.
░░
░░ The job identifier is 231.
Oct 12 13:33:46 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: TPM2 PCR Barrier (User) was skipped because of an unmet condition check (ConditionPathExists=/sys/firmware/efi/efivars/StubPcrKernelImage-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f).
░░ Subject: A start job for unit systemd-pcrphase.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit systemd-pcrphase.service has finished successfully.
░░
░░ The job identifier is 126.
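[Editor's note] The journal above shows NetworkManager reaching CONNECTED_GLOBAL and NetworkManager-wait-online.service finishing before cloud-init starts crawling the metadata service. A test that needs to gate on the same condition could poll nm-online, which is the check wait-online itself performs. This is a minimal hypothetical sketch, not part of the recorded run; the task name and 60-second timeout are illustrative assumptions:

    # Hypothetical readiness gate, not part of the recorded run.
    # nm-online -s waits for NetworkManager startup to complete,
    # mirroring the "startup complete" message logged above.
    - name: Wait until NetworkManager reports startup complete
      ansible.builtin.command:
        cmd: nm-online -s -q -t 60
      changed_when: false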
Oct 12 13:33:47 ip-10-31-44-126.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 0 affinity: Input/output error
Oct 12 13:33:47 ip-10-31-44-126.us-east-1.aws.redhat.com irqbalance[582]: IRQ 0 affinity is now unmanaged
Oct 12 13:33:47 ip-10-31-44-126.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 48 affinity: Input/output error
Oct 12 13:33:47 ip-10-31-44-126.us-east-1.aws.redhat.com irqbalance[582]: IRQ 48 affinity is now unmanaged
Oct 12 13:33:47 ip-10-31-44-126.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 49 affinity: Input/output error
Oct 12 13:33:47 ip-10-31-44-126.us-east-1.aws.redhat.com irqbalance[582]: IRQ 49 affinity is now unmanaged
Oct 12 13:33:47 ip-10-31-44-126.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 50 affinity: Input/output error
Oct 12 13:33:47 ip-10-31-44-126.us-east-1.aws.redhat.com irqbalance[582]: IRQ 50 affinity is now unmanaged
Oct 12 13:33:47 ip-10-31-44-126.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 51 affinity: Input/output error
Oct 12 13:33:47 ip-10-31-44-126.us-east-1.aws.redhat.com irqbalance[582]: IRQ 51 affinity is now unmanaged
Oct 12 13:33:47 ip-10-31-44-126.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 52 affinity: Input/output error
Oct 12 13:33:47 ip-10-31-44-126.us-east-1.aws.redhat.com irqbalance[582]: IRQ 52 affinity is now unmanaged
Oct 12 13:33:47 ip-10-31-44-126.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 53 affinity: Input/output error
Oct 12 13:33:47 ip-10-31-44-126.us-east-1.aws.redhat.com irqbalance[582]: IRQ 53 affinity is now unmanaged
Oct 12 13:33:47 ip-10-31-44-126.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 54 affinity: Input/output error
Oct 12 13:33:47 ip-10-31-44-126.us-east-1.aws.redhat.com irqbalance[582]: IRQ 54 affinity is now unmanaged
Oct 12 13:33:47 ip-10-31-44-126.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 55 affinity: Input/output error
Oct 12 13:33:47 ip-10-31-44-126.us-east-1.aws.redhat.com irqbalance[582]: IRQ 55 affinity is now unmanaged
Oct 12 13:33:47 ip-10-31-44-126.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 56 affinity: Input/output error
Oct 12 13:33:47 ip-10-31-44-126.us-east-1.aws.redhat.com irqbalance[582]: IRQ 56 affinity is now unmanaged
Oct 12 13:33:47 ip-10-31-44-126.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 57 affinity: Input/output error
Oct 12 13:33:47 ip-10-31-44-126.us-east-1.aws.redhat.com irqbalance[582]: IRQ 57 affinity is now unmanaged
Oct 12 13:33:47 ip-10-31-44-126.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 58 affinity: Input/output error
Oct 12 13:33:47 ip-10-31-44-126.us-east-1.aws.redhat.com irqbalance[582]: IRQ 58 affinity is now unmanaged
Oct 12 13:33:47 ip-10-31-44-126.us-east-1.aws.redhat.com irqbalance[582]: Cannot change IRQ 59 affinity: Input/output error
Oct 12 13:33:47 ip-10-31-44-126.us-east-1.aws.redhat.com irqbalance[582]: IRQ 59 affinity is now unmanaged
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: Generating public/private rsa key pair.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: Your identification has been saved in /etc/ssh/ssh_host_rsa_key
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: Your public key has been saved in /etc/ssh/ssh_host_rsa_key.pub
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: The key fingerprint is:
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: SHA256:D4AB6fc633+iGscxIXUijGvwGeNVlRNwOULHDwIaE9A root@ip-10-31-44-126.us-east-1.aws.redhat.com
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: The key's randomart image is:
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: +---[RSA 3072]----+
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: | o==+o+*+*+ |
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: | o E*ooo=B |
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: | . +o*o .o = |
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: | . B o . . |
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: | o . S |
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: | .. = |
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: | .. o . |
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: | o + . . |
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: | oo.oo.o |
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: +----[SHA256]-----+
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: Generating public/private dsa key pair.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: Your identification has been saved in /etc/ssh/ssh_host_dsa_key
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: Your public key has been saved in /etc/ssh/ssh_host_dsa_key.pub
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: The key fingerprint is:
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: SHA256:td8REfC63FtZv9FLV3AXf0dvUY6OZCN4hOgQp6ozOzE root@ip-10-31-44-126.us-east-1.aws.redhat.com
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: The key's randomart image is:
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: +---[DSA 1024]----+
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: | .... .. ..o++|
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: | .o. .o . ==|
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: | .o . + + = X|
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: | . . o = = =+|
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: | . S . o o o|
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: |E. o + .*|
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: |+o + o+=|
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: |.+ .o=|
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: |.. .o |
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: +----[SHA256]-----+
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: Generating public/private ecdsa key pair.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: Your identification has been saved in /etc/ssh/ssh_host_ecdsa_key
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: Your public key has been saved in /etc/ssh/ssh_host_ecdsa_key.pub
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: The key fingerprint is:
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: SHA256:9lm72/nRe1dAq9Pq/Rmid0BhvuozQRsnrC93n+h7RLw root@ip-10-31-44-126.us-east-1.aws.redhat.com
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: The key's randomart image is:
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: +---[ECDSA 256]---+
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: | |
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: | o. |
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: | . o.o. |
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: | = +oo |
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: | S o Boo..|
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: | . o =o+.Eo|
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: | + ++ooo|
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: | . *o*o+O|
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: | ++X=OB=|
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: +----[SHA256]-----+
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: Generating public/private ed25519 key pair.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: Your identification has been saved in /etc/ssh/ssh_host_ed25519_key
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: Your public key has been saved in /etc/ssh/ssh_host_ed25519_key.pub
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: The key fingerprint is:
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: SHA256:oY1MjRazlrTV4qX1HYQ2qyPwddWwecKNclKOm5il33s root@ip-10-31-44-126.us-east-1.aws.redhat.com
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: The key's randomart image is:
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: +--[ED25519 256]--+
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: | + .. o+o |
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: | . X. + +=o=.|
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: | O.o= o==O.o|
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: | =.+o..=+*.o |
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: | +oS.+oo |
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: | o o. . |
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: | . .. . |
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: | .E|
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: | .. |
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[704]: +----[SHA256]-----+
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Finished Initial cloud-init job (metadata service crawler).
░░ Subject: A start job for unit cloud-init.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit cloud-init.service has finished successfully.
░░
░░ The job identifier is 216.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Reached target Cloud-config availability.
░░ Subject: A start job for unit cloud-config.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit cloud-config.target has finished successfully.
░░
░░ The job identifier is 225.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Reached target Network is Online.
░░ Subject: A start job for unit network-online.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit network-online.target has finished successfully.
░░
░░ The job identifier is 189.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Starting Apply the settings specified in cloud-config...
░░ Subject: A start job for unit cloud-config.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit cloud-config.service has begun execution.
░░
░░ The job identifier is 224.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Starting Crash recovery kernel arming...
░░ Subject: A start job for unit kdump.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit kdump.service has begun execution.
░░
░░ The job identifier is 213.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Starting The restraint harness....
░░ Subject: A start job for unit restraintd.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit restraintd.service has begun execution.
░░
░░ The job identifier is 198.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Starting Notify NFS peers of a restart...
░░ Subject: A start job for unit rpc-statd-notify.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit rpc-statd-notify.service has begun execution.
░░
░░ The job identifier is 202.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Starting System Logging Service...
░░ Subject: A start job for unit rsyslog.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit rsyslog.service has begun execution.
░░
░░ The job identifier is 209.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Starting OpenSSH server daemon...
░░ Subject: A start job for unit sshd.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit sshd.service has begun execution.
░░
░░ The job identifier is 218.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Started The restraint harness..
░░ Subject: A start job for unit restraintd.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit restraintd.service has finished successfully.
░░
░░ The job identifier is 198.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com sshd[790]: Server listening on 0.0.0.0 port 22.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com sshd[790]: Server listening on :: port 22.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Started OpenSSH server daemon.
░░ Subject: A start job for unit sshd.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit sshd.service has finished successfully.
░░
░░ The job identifier is 218.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com sm-notify[787]: Version 2.5.4 starting
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Started Notify NFS peers of a restart.
░░ Subject: A start job for unit rpc-statd-notify.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit rpc-statd-notify.service has finished successfully.
░░
░░ The job identifier is 202.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[819]: Cloud-init v. 23.4-19.el9 running 'modules:config' at Sat, 12 Oct 2024 17:33:48 +0000. Up 33.48 seconds.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com rsyslogd[789]: [origin software="rsyslogd" swVersion="8.2310.0-4.el9" x-pid="789" x-info="https://www.rsyslog.com"] start
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com rsyslogd[789]: imjournal: journal files changed, reloading... [v8.2310.0-4.el9 try https://www.rsyslog.com/e/0 ]
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Started System Logging Service.
░░ Subject: A start job for unit rsyslog.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit rsyslog.service has finished successfully.
░░
░░ The job identifier is 209.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com sshd[790]: Received signal 15; terminating.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Stopping OpenSSH server daemon...
░░ Subject: A stop job for unit sshd.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit sshd.service has begun execution.
░░
░░ The job identifier is 479.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: sshd.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit sshd.service has successfully entered the 'dead' state.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Stopped OpenSSH server daemon.
░░ Subject: A stop job for unit sshd.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit sshd.service has finished.
░░
░░ The job identifier is 479 and the job result is done.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Stopped target sshd-keygen.target.
░░ Subject: A stop job for unit sshd-keygen.target has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit sshd-keygen.target has finished.
░░
░░ The job identifier is 547 and the job result is done.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Stopping sshd-keygen.target...
░░ Subject: A stop job for unit sshd-keygen.target has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit sshd-keygen.target has begun execution.
░░
░░ The job identifier is 547.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: OpenSSH ecdsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target).
░░ Subject: A start job for unit sshd-keygen@ecdsa.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit sshd-keygen@ecdsa.service has finished successfully.
░░
░░ The job identifier is 543.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: OpenSSH ed25519 Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target).
░░ Subject: A start job for unit sshd-keygen@ed25519.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit sshd-keygen@ed25519.service has finished successfully.
░░
░░ The job identifier is 546.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: OpenSSH rsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target).
░░ Subject: A start job for unit sshd-keygen@rsa.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit sshd-keygen@rsa.service has finished successfully.
░░
░░ The job identifier is 545.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Reached target sshd-keygen.target.
░░ Subject: A start job for unit sshd-keygen.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit sshd-keygen.target has finished successfully.
░░
░░ The job identifier is 547.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Starting OpenSSH server daemon...
░░ Subject: A start job for unit sshd.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit sshd.service has begun execution.
░░
░░ The job identifier is 479.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com sshd[844]: Server listening on 0.0.0.0 port 22.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com sshd[844]: Server listening on :: port 22.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Started OpenSSH server daemon.
░░ Subject: A start job for unit sshd.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit sshd.service has finished successfully.
░░
░░ The job identifier is 479.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Finished Apply the settings specified in cloud-config.
░░ Subject: A start job for unit cloud-config.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit cloud-config.service has finished successfully.
░░
░░ The job identifier is 224.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Starting Execute cloud user/final scripts...
░░ Subject: A start job for unit cloud-final.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit cloud-final.service has begun execution.
░░
░░ The job identifier is 226.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Starting Permit User Sessions...
░░ Subject: A start job for unit systemd-user-sessions.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit systemd-user-sessions.service has begun execution.
░░
░░ The job identifier is 246.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Finished Permit User Sessions.
░░ Subject: A start job for unit systemd-user-sessions.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit systemd-user-sessions.service has finished successfully.
░░
░░ The job identifier is 246.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Started Command Scheduler.
░░ Subject: A start job for unit crond.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit crond.service has finished successfully.
░░
░░ The job identifier is 214.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Started Getty on tty1.
░░ Subject: A start job for unit getty@tty1.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit getty@tty1.service has finished successfully.
░░
░░ The job identifier is 239.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Started Serial Getty on ttyS0.
░░ Subject: A start job for unit serial-getty@ttyS0.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit serial-getty@ttyS0.service has finished successfully.
░░
░░ The job identifier is 243.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Reached target Login Prompts.
░░ Subject: A start job for unit getty.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit getty.target has finished successfully.
░░
░░ The job identifier is 238.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Reached target Multi-User System.
░░ Subject: A start job for unit multi-user.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit multi-user.target has finished successfully.
░░
░░ The job identifier is 115.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Starting Record Runlevel Change in UTMP...
░░ Subject: A start job for unit systemd-update-utmp-runlevel.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit systemd-update-utmp-runlevel.service has begun execution.
░░
░░ The job identifier is 228.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: systemd-update-utmp-runlevel.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit systemd-update-utmp-runlevel.service has successfully entered the 'dead' state.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Finished Record Runlevel Change in UTMP.
░░ Subject: A start job for unit systemd-update-utmp-runlevel.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit systemd-update-utmp-runlevel.service has finished successfully.
░░
░░ The job identifier is 228.
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com restraintd[791]: Listening on http://localhost:8081
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com crond[849]: (CRON) STARTUP (1.5.7)
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com crond[849]: (CRON) INFO (Syslog will be used instead of sendmail.)
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com crond[849]: (CRON) INFO (RANDOM_DELAY will be scaled with factor 90% if used.)
Oct 12 13:33:48 ip-10-31-44-126.us-east-1.aws.redhat.com crond[849]: (CRON) INFO (running with inotify support)
Oct 12 13:33:49 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[856]: Cloud-init v. 23.4-19.el9 running 'modules:final' at Sat, 12 Oct 2024 17:33:48 +0000. Up 34.00 seconds.
Oct 12 13:33:49 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[865]: #############################################################
Oct 12 13:33:49 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[866]: -----BEGIN SSH HOST KEY FINGERPRINTS-----
Oct 12 13:33:49 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[868]: 1024 SHA256:td8REfC63FtZv9FLV3AXf0dvUY6OZCN4hOgQp6ozOzE root@ip-10-31-44-126.us-east-1.aws.redhat.com (DSA)
Oct 12 13:33:49 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[873]: 256 SHA256:9lm72/nRe1dAq9Pq/Rmid0BhvuozQRsnrC93n+h7RLw root@ip-10-31-44-126.us-east-1.aws.redhat.com (ECDSA)
Oct 12 13:33:49 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[879]: 256 SHA256:oY1MjRazlrTV4qX1HYQ2qyPwddWwecKNclKOm5il33s root@ip-10-31-44-126.us-east-1.aws.redhat.com (ED25519)
Oct 12 13:33:49 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[887]: 3072 SHA256:D4AB6fc633+iGscxIXUijGvwGeNVlRNwOULHDwIaE9A root@ip-10-31-44-126.us-east-1.aws.redhat.com (RSA)
Oct 12 13:33:49 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[889]: -----END SSH HOST KEY FINGERPRINTS-----
Oct 12 13:33:49 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[891]: #############################################################
Oct 12 13:33:49 ip-10-31-44-126.us-east-1.aws.redhat.com cloud-init[856]: Cloud-init v. 23.4-19.el9 finished at Sat, 12 Oct 2024 17:33:49 +0000. Datasource DataSourceEc2Local. Up 34.27 seconds
Oct 12 13:33:49 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Finished Execute cloud user/final scripts.
░░ Subject: A start job for unit cloud-final.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit cloud-final.service has finished successfully.
░░
░░ The job identifier is 226.
Oct 12 13:33:49 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Reached target Cloud-init target.
░░ Subject: A start job for unit cloud-init.target has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit cloud-init.target has finished successfully.
░░
░░ The job identifier is 215.
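[Editor's note] cloud-init's 'modules:final' stage prints the host key fingerprints to the console, as captured in the banner above. If a test needed to confirm that the keys on disk match that banner, something like the following would do it. This is a hypothetical sketch, not part of the recorded run; the asserted string is the ED25519 fingerprint taken from the log above:

    # Hypothetical verification tasks, not part of the recorded run.
    - name: Recompute the ED25519 host key fingerprint
      ansible.builtin.command:
        cmd: ssh-keygen -lf /etc/ssh/ssh_host_ed25519_key.pub
      register: ed25519_fp
      changed_when: false

    - name: Compare against the fingerprint cloud-init printed on the console
      ansible.builtin.assert:
        that:
          - "'SHA256:oY1MjRazlrTV4qX1HYQ2qyPwddWwecKNclKOm5il33s' in ed25519_fp.stdout"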
Oct 12 13:33:49 ip-10-31-44-126.us-east-1.aws.redhat.com kdumpctl[794]: kdump: Detected change(s) in the following file(s): /etc/fstab
Oct 12 13:33:52 ip-10-31-44-126.us-east-1.aws.redhat.com chronyd[591]: Selected source 10.2.32.38
Oct 12 13:33:52 ip-10-31-44-126.us-east-1.aws.redhat.com chronyd[591]: System clock TAI offset set to 37 seconds
Oct 12 13:33:54 ip-10-31-44-126.us-east-1.aws.redhat.com chronyd[591]: Selected source 10.11.160.238
Oct 12 13:33:55 ip-10-31-44-126.us-east-1.aws.redhat.com kernel: block xvda: the capability attribute has been deprecated.
Oct 12 13:33:55 ip-10-31-44-126.us-east-1.aws.redhat.com kdumpctl[794]: kdump: Rebuilding /boot/initramfs-5.14.0-513.el9.x86_64kdump.img
Oct 12 13:33:56 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.
Oct 12 13:33:56 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1414]: dracut-057-70.git20240819.el9
Oct 12 13:33:57 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: Executing: /usr/bin/dracut --add kdumpbase --quiet --hostonly --hostonly-cmdline --hostonly-i18n --hostonly-mode strict --hostonly-nics -o "plymouth resume ifcfg earlykdump" --mount "/dev/disk/by-uuid/efa2924f-8850-491c-a9bf-997f8385d98d /sysroot xfs rw,relatime,seclabel,attr2,inode64,logbufs=8,logbsize=32k,noquota" --squash-compressor zstd --no-hostonly-default-device -f /boot/initramfs-5.14.0-513.el9.x86_64kdump.img 5.14.0-513.el9.x86_64
Oct 12 13:33:58 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'systemd-networkd' will not be installed, because command 'networkctl' could not be found!
Oct 12 13:33:58 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'systemd-networkd' will not be installed, because command '/usr/lib/systemd/systemd-networkd' could not be found!
Oct 12 13:33:58 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'systemd-networkd' will not be installed, because command '/usr/lib/systemd/systemd-networkd-wait-online' could not be found!
Oct 12 13:33:58 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'systemd-resolved' will not be installed, because command 'resolvectl' could not be found!
Oct 12 13:33:58 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'systemd-resolved' will not be installed, because command '/usr/lib/systemd/systemd-resolved' could not be found!
Oct 12 13:33:58 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-timesyncd' could not be found!
Oct 12 13:33:58 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-time-wait-sync' could not be found!
Oct 12 13:33:58 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'busybox' will not be installed, because command 'busybox' could not be found!
Oct 12 13:33:58 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'dbus-daemon' will not be installed, because command 'dbus-daemon' could not be found!
Oct 12 13:33:58 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'connman' will not be installed, because command 'connmand' could not be found!
Oct 12 13:33:58 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'connman' will not be installed, because command 'connmanctl' could not be found!
Oct 12 13:33:58 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'connman' will not be installed, because command 'connmand-wait-online' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'network-wicked' will not be installed, because command 'wicked' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: Module 'ifcfg' will not be installed, because it's in the list to be omitted!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: Module 'plymouth' will not be installed, because it's in the list to be omitted!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: 62bluetooth: Could not find any command of '/usr/lib/bluetooth/bluetoothd /usr/libexec/bluetooth/bluetoothd'!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'lvmmerge' will not be installed, because command 'lvm' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'lvmthinpool-monitor' will not be installed, because command 'lvm' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'btrfs' will not be installed, because command 'btrfs' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'dmraid' will not be installed, because command 'dmraid' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'lvm' will not be installed, because command 'lvm' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'mdraid' will not be installed, because command 'mdadm' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'pcsc' will not be installed, because command 'pcscd' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'tpm2-tss' will not be installed, because command 'tpm2' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'cifs' will not be installed, because command 'mount.cifs' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'iscsi' will not be installed, because command 'iscsi-iname' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'iscsi' will not be installed, because command 'iscsiadm' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'iscsi' will not be installed, because command 'iscsid' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'nvmf' will not be installed, because command 'nvme' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: Module 'resume' will not be installed, because it's in the list to be omitted!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'biosdevname' will not be installed, because command 'biosdevname' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: Module 'earlykdump' will not be installed, because it's in the list to be omitted!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'memstrack' will not be installed, because command 'memstrack' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: memstrack is not available
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: If you need to use rd.memdebug>=4, please install memstrack and procps-ng
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'systemd-resolved' will not be installed, because command 'resolvectl' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'systemd-resolved' will not be installed, because command '/usr/lib/systemd/systemd-resolved' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-timesyncd' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-time-wait-sync' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'busybox' will not be installed, because command 'busybox' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'dbus-daemon' will not be installed, because command 'dbus-daemon' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'connman' will not be installed, because command 'connmand' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'connman' will not be installed, because command 'connmanctl' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'connman' will not be installed, because command 'connmand-wait-online' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'network-wicked' will not be installed, because command 'wicked' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: 62bluetooth: Could not find any command of '/usr/lib/bluetooth/bluetoothd /usr/libexec/bluetooth/bluetoothd'!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'lvmmerge' will not be installed, because command 'lvm' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'lvmthinpool-monitor' will not be installed, because command 'lvm' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'btrfs' will not be installed, because command 'btrfs' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'dmraid' will not be installed, because command 'dmraid' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'lvm' will not be installed, because command 'lvm' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'mdraid' will not be installed, because command 'mdadm' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'pcsc' will not be installed, because command 'pcscd' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'tpm2-tss' will not be installed, because command 'tpm2' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'cifs' will not be installed, because command 'mount.cifs' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'iscsi' will not be installed, because command 'iscsi-iname' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'iscsi' will not be installed, because command 'iscsiadm' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'iscsi' will not be installed, because command 'iscsid' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'nvmf' will not be installed, because command 'nvme' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: dracut module 'memstrack' will not be installed, because command 'memstrack' could not be found!
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: memstrack is not available
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: If you need to use rd.memdebug>=4, please install memstrack and procps-ng
Oct 12 13:33:59 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Including module: systemd ***
Oct 12 13:34:00 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Including module: systemd-initrd ***
Oct 12 13:34:00 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Including module: nss-softokn ***
Oct 12 13:34:00 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Including module: rngd ***
Oct 12 13:34:00 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Including module: i18n ***
Oct 12 13:34:00 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Including module: drm ***
Oct 12 13:34:00 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Including module: prefixdevname ***
Oct 12 13:34:00 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Including module: kernel-modules ***
Oct 12 13:34:01 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Including module: kernel-modules-extra ***
Oct 12 13:34:01 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: kernel-modules-extra: configuration source "/run/depmod.d" does not exist
Oct 12 13:34:01 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: kernel-modules-extra: configuration source "/lib/depmod.d" does not exist
Oct 12 13:34:01 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: kernel-modules-extra: parsing configuration file "/etc/depmod.d/dist.conf"
Oct 12 13:34:01 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: kernel-modules-extra: /etc/depmod.d/dist.conf: added "updates extra built-in weak-updates" to the list of search directories
Oct 12 13:34:01 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Including module: fstab-sys ***
Oct 12 13:34:01 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Including module: rootfs-block ***
Oct 12 13:34:01 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Including module: terminfo ***
Oct 12 13:34:01 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Including module: udev-rules ***
Oct 12 13:34:01 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: Skipping udev rule: 91-permissions.rules
Oct 12 13:34:01 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: Skipping udev rule: 80-drivers-modprobe.rules
Oct 12 13:34:02 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Including module: dracut-systemd ***
Oct 12 13:34:02 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Including module: usrmount ***
Oct 12 13:34:02 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Including module: base ***
Oct 12 13:34:02 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Including module: fs-lib ***
Oct 12 13:34:02 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Including module: kdumpbase ***
Oct 12 13:34:02 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Including module: microcode_ctl-fw_dir_override ***
Oct 12 13:34:02 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: microcode_ctl module: mangling fw_dir
Oct 12 13:34:02 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: microcode_ctl: reset fw_dir to "/lib/firmware/updates /lib/firmware"
Oct 12 13:34:02 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel"...
Oct 12 13:34:02 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: microcode_ctl: intel: caveats check for kernel version "5.14.0-513.el9.x86_64" passed, adding "/usr/share/microcode_ctl/ucode_with_caveats/intel" to fw_dir variable
Oct 12 13:34:02 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-2d-07"...
Oct 12 13:34:02 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: microcode_ctl: configuration "intel-06-2d-07" is ignored
Oct 12 13:34:02 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-4e-03"...
Oct 12 13:34:02 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: microcode_ctl: configuration "intel-06-4e-03" is ignored
Oct 12 13:34:02 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-4f-01"...
Oct 12 13:34:02 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: microcode_ctl: configuration "intel-06-4f-01" is ignored
Oct 12 13:34:02 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-55-04"...
Oct 12 13:34:02 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: microcode_ctl: configuration "intel-06-55-04" is ignored
Oct 12 13:34:02 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-5e-03"...
Oct 12 13:34:02 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: microcode_ctl: configuration "intel-06-5e-03" is ignored
Oct 12 13:34:02 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-8c-01"...
Oct 12 13:34:02 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: microcode_ctl: configuration "intel-06-8c-01" is ignored Oct 12 13:34:02 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-8e-9e-0x-0xca"... Oct 12 13:34:02 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: microcode_ctl: configuration "intel-06-8e-9e-0x-0xca" is ignored Oct 12 13:34:02 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-8e-9e-0x-dell"... Oct 12 13:34:02 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: microcode_ctl: configuration "intel-06-8e-9e-0x-dell" is ignored Oct 12 13:34:02 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: microcode_ctl: final fw_dir: "/usr/share/microcode_ctl/ucode_with_caveats/intel /lib/firmware/updates /lib/firmware" Oct 12 13:34:02 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Including module: shutdown *** Oct 12 13:34:02 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Including module: squash *** Oct 12 13:34:02 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Including modules done *** Oct 12 13:34:03 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Installing kernel module dependencies *** Oct 12 13:34:04 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Installing kernel module dependencies done *** Oct 12 13:34:04 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Resolving executable dependencies *** Oct 12 13:34:05 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Resolving executable dependencies done *** Oct 12 13:34:05 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Hardlinking files *** Oct 12 13:34:05 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: Mode: real Oct 12 13:34:05 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: Files: 433 Oct 12 13:34:05 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: Linked: 1 files Oct 12 13:34:05 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: Compared: 0 xattrs Oct 12 13:34:05 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: Compared: 8 files Oct 12 13:34:05 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: Saved: 56.15 KiB Oct 12 13:34:05 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: Duration: 0.004516 seconds Oct 12 13:34:05 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Hardlinking files done *** Oct 12 13:34:05 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Generating early-microcode cpio image *** Oct 12 13:34:05 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Constructing GenuineIntel.bin *** Oct 12 13:34:05 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Constructing GenuineIntel.bin *** Oct 12 13:34:05 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Store current command line parameters *** Oct 12 13:34:05 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: Stored kernel commandline: Oct 12 13:34:05 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: No dracut internal kernel commandline stored in the initramfs Oct 12 13:34:06 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Install squash loader *** Oct 12 13:34:06 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Stripping files *** Oct 12 13:34:07 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Stripping files done *** Oct 12 13:34:07 
Oct 12 13:34:07 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Squashing the files inside the initramfs ***
Oct 12 13:34:12 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Squashing the files inside the initramfs done ***
Oct 12 13:34:12 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Creating image file '/boot/initramfs-5.14.0-513.el9.x86_64kdump.img' ***
Oct 12 13:34:13 ip-10-31-44-126.us-east-1.aws.redhat.com dracut[1416]: *** Creating initramfs image file '/boot/initramfs-5.14.0-513.el9.x86_64kdump.img' done ***
Oct 12 13:34:13 ip-10-31-44-126.us-east-1.aws.redhat.com kdumpctl[794]: kdump: kexec: loaded kdump kernel
Oct 12 13:34:13 ip-10-31-44-126.us-east-1.aws.redhat.com kdumpctl[794]: kdump: Starting kdump: [OK]
Oct 12 13:34:13 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Finished Crash recovery kernel arming.
░░ Subject: A start job for unit kdump.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit kdump.service has finished successfully.
░░
░░ The job identifier is 213.
Oct 12 13:34:13 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Startup finished in 1.076s (kernel) + 7.920s (initrd) + 49.721s (userspace) = 58.718s.
░░ Subject: System start-up is now complete
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ All system services necessary queued for starting at boot have been
░░ started. Note that this does not mean that the machine is now idle as services
░░ might still be busy with completing start-up.
░░
░░ Kernel start-up required 1076427 microseconds.
░░
░░ Initrd start-up required 7920718 microseconds.
░░
░░ Userspace start-up required 49721333 microseconds.
Oct 12 13:34:15 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: systemd-hostnamed.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit systemd-hostnamed.service has successfully entered the 'dead' state.
Oct 12 13:37:20 ip-10-31-44-126.us-east-1.aws.redhat.com sshd[4046]: Accepted publickey for root from 10.30.33.143 port 45152 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU
Oct 12 13:37:20 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Created slice User Slice of UID 0.
░░ Subject: A start job for unit user-0.slice has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit user-0.slice has finished successfully.
░░
░░ The job identifier is 612.
Oct 12 13:37:20 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Starting User Runtime Directory /run/user/0...
░░ Subject: A start job for unit user-runtime-dir@0.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit user-runtime-dir@0.service has begun execution.
░░
░░ The job identifier is 611.
Oct 12 13:37:20 ip-10-31-44-126.us-east-1.aws.redhat.com systemd-logind[584]: New session 1 of user root.
░░ Subject: A new session 1 has been created for user root
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░
░░ A new session with the ID 1 has been created for the user root.
░░
░░ The leading process of the session is 4046.
Oct 12 13:37:20 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Finished User Runtime Directory /run/user/0.
░░ Subject: A start job for unit user-runtime-dir@0.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit user-runtime-dir@0.service has finished successfully.
░░
░░ The job identifier is 611.
Oct 12 13:37:20 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Starting User Manager for UID 0...
░░ Subject: A start job for unit user@0.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit user@0.service has begun execution.
░░
░░ The job identifier is 548.
Oct 12 13:37:20 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4050]: pam_unix(systemd-user:session): session opened for user root(uid=0) by root(uid=0)
Oct 12 13:37:20 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4050]: Queued start job for default target Main User Target.
Oct 12 13:37:20 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4050]: Created slice User Application Slice.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 8.
Oct 12 13:37:20 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4050]: Mark boot as successful after the user session has run 2 minutes was skipped because of an unmet condition check (ConditionUser=!@system).
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 4.
Oct 12 13:37:20 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4050]: Started Daily Cleanup of User's Temporary Directories.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 6.
Oct 12 13:37:20 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4050]: Reached target Paths.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 10.
Oct 12 13:37:20 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4050]: Reached target Timers.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 3.
Oct 12 13:37:20 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4050]: Starting D-Bus User Message Bus Socket...
░░ Subject: A start job for unit UNIT has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has begun execution.
░░
░░ The job identifier is 12.
Oct 12 13:37:20 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4050]: Starting Create User's Volatile Files and Directories...
░░ Subject: A start job for unit UNIT has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has begun execution.
░░
░░ The job identifier is 7.
Oct 12 13:37:20 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4050]: Listening on D-Bus User Message Bus Socket.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 12.
Oct 12 13:37:20 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4050]: Finished Create User's Volatile Files and Directories.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 7.
Oct 12 13:37:20 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4050]: Reached target Sockets.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 11.
Oct 12 13:37:20 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4050]: Reached target Basic System.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 2.
Oct 12 13:37:20 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4050]: Reached target Main User Target.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1.
Oct 12 13:37:20 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4050]: Startup finished in 147ms.
░░ Subject: User manager start-up is now complete
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The user manager instance for user 0 has been started. All services queued
░░ for starting have been started. Note that other services might still be starting
░░ up or be started at any later time.
░░
░░ Startup of the manager took 147492 microseconds.
Oct 12 13:37:20 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Started User Manager for UID 0.
░░ Subject: A start job for unit user@0.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit user@0.service has finished successfully.
░░
░░ The job identifier is 548.
Oct 12 13:37:20 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Started Session 1 of User root.
░░ Subject: A start job for unit session-1.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit session-1.scope has finished successfully.
░░
░░ The job identifier is 614.
Oct 12 13:37:20 ip-10-31-44-126.us-east-1.aws.redhat.com sshd[4046]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0)
Oct 12 13:37:20 ip-10-31-44-126.us-east-1.aws.redhat.com sshd[4059]: Received disconnect from 10.30.33.143 port 45152:11: disconnected by user
Oct 12 13:37:20 ip-10-31-44-126.us-east-1.aws.redhat.com sshd[4059]: Disconnected from user root 10.30.33.143 port 45152
Oct 12 13:37:20 ip-10-31-44-126.us-east-1.aws.redhat.com sshd[4046]: pam_unix(sshd:session): session closed for user root
Oct 12 13:37:20 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: session-1.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit session-1.scope has successfully entered the 'dead' state.
Oct 12 13:37:20 ip-10-31-44-126.us-east-1.aws.redhat.com systemd-logind[584]: Session 1 logged out. Waiting for processes to exit.
Oct 12 13:37:20 ip-10-31-44-126.us-east-1.aws.redhat.com systemd-logind[584]: Removed session 1.
░░ Subject: Session 1 has been terminated
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░
░░ A session with the ID 1 has been terminated.
Oct 12 13:37:31 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Stopping User Manager for UID 0...
░░ Subject: A stop job for unit user@0.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit user@0.service has begun execution.
░░
░░ The job identifier is 681.
Oct 12 13:37:31 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4050]: Activating special unit Exit the Session...
Oct 12 13:37:31 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4050]: Stopped target Main User Target.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 27 and the job result is done.
Oct 12 13:37:31 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4050]: Stopped target Basic System.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 29 and the job result is done.
Oct 12 13:37:31 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4050]: Stopped target Paths.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 21 and the job result is done.
Oct 12 13:37:31 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4050]: Stopped target Sockets.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 16 and the job result is done.
Oct 12 13:37:31 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4050]: Stopped target Timers.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 20 and the job result is done.
Oct 12 13:37:31 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4050]: Stopped Daily Cleanup of User's Temporary Directories.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 28 and the job result is done.
Oct 12 13:37:31 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4050]: Closed D-Bus User Message Bus Socket.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 24 and the job result is done.
Oct 12 13:37:31 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4050]: Stopped Create User's Volatile Files and Directories.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 23 and the job result is done.
Oct 12 13:37:31 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4050]: Removed slice User Application Slice.
░░ Subject: A stop job for unit UNIT has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit UNIT has finished.
░░
░░ The job identifier is 22 and the job result is done.
Oct 12 13:37:31 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4050]: Reached target Shutdown.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 15.
Oct 12 13:37:31 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4050]: Finished Exit the Session.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 14.
Oct 12 13:37:31 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4050]: Reached target Exit the Session.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 13.
Oct 12 13:37:31 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: user@0.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit user@0.service has successfully entered the 'dead' state.
Oct 12 13:37:31 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Stopped User Manager for UID 0.
░░ Subject: A stop job for unit user@0.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit user@0.service has finished.
░░
░░ The job identifier is 681 and the job result is done.
Oct 12 13:37:31 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Stopping User Runtime Directory /run/user/0...
░░ Subject: A stop job for unit user-runtime-dir@0.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit user-runtime-dir@0.service has begun execution.
░░
░░ The job identifier is 682.
Oct 12 13:37:31 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: run-user-0.mount: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit run-user-0.mount has successfully entered the 'dead' state.
Oct 12 13:37:31 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: user-runtime-dir@0.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit user-runtime-dir@0.service has successfully entered the 'dead' state.
Oct 12 13:37:31 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Stopped User Runtime Directory /run/user/0.
░░ Subject: A stop job for unit user-runtime-dir@0.service has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit user-runtime-dir@0.service has finished.
░░
░░ The job identifier is 682 and the job result is done.
Oct 12 13:37:31 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Removed slice User Slice of UID 0.
░░ Subject: A stop job for unit user-0.slice has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit user-0.slice has finished.
░░
░░ The job identifier is 683 and the job result is done.
Oct 12 13:37:33 ip-10-31-44-126.us-east-1.aws.redhat.com sshd[4089]: Accepted publickey for root from 10.31.9.217 port 49052 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU
Oct 12 13:37:33 ip-10-31-44-126.us-east-1.aws.redhat.com sshd[4090]: Accepted publickey for root from 10.31.9.217 port 49064 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU
Oct 12 13:37:33 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Created slice User Slice of UID 0.
░░ Subject: A start job for unit user-0.slice has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit user-0.slice has finished successfully.
░░
░░ The job identifier is 685.
Oct 12 13:37:33 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Starting User Runtime Directory /run/user/0...
░░ Subject: A start job for unit user-runtime-dir@0.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit user-runtime-dir@0.service has begun execution.
░░
░░ The job identifier is 689.
Oct 12 13:37:33 ip-10-31-44-126.us-east-1.aws.redhat.com systemd-logind[584]: New session 3 of user root.
░░ Subject: A new session 3 has been created for user root
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░
░░ A new session with the ID 3 has been created for the user root.
░░
░░ The leading process of the session is 4089.
Oct 12 13:37:33 ip-10-31-44-126.us-east-1.aws.redhat.com systemd-logind[584]: New session 4 of user root.
░░ Subject: A new session 4 has been created for user root
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░
░░ A new session with the ID 4 has been created for the user root.
░░
░░ The leading process of the session is 4090.
Oct 12 13:37:33 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Finished User Runtime Directory /run/user/0.
░░ Subject: A start job for unit user-runtime-dir@0.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit user-runtime-dir@0.service has finished successfully.
░░
░░ The job identifier is 689.
Oct 12 13:37:33 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Starting User Manager for UID 0...
░░ Subject: A start job for unit user@0.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit user@0.service has begun execution.
░░
░░ The job identifier is 684.
Oct 12 13:37:33 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4096]: pam_unix(systemd-user:session): session opened for user root(uid=0) by root(uid=0)
Oct 12 13:37:33 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4096]: Queued start job for default target Main User Target.
Oct 12 13:37:33 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4096]: Created slice User Application Slice.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 8.
Oct 12 13:37:33 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4096]: Mark boot as successful after the user session has run 2 minutes was skipped because of an unmet condition check (ConditionUser=!@system).
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 4.
Oct 12 13:37:33 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4096]: Started Daily Cleanup of User's Temporary Directories.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 6.
Oct 12 13:37:33 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4096]: Reached target Paths.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 10.
Oct 12 13:37:33 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4096]: Reached target Timers.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 3.
Oct 12 13:37:33 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4096]: Starting D-Bus User Message Bus Socket...
░░ Subject: A start job for unit UNIT has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has begun execution.
░░
░░ The job identifier is 12.
Oct 12 13:37:33 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4096]: Starting Create User's Volatile Files and Directories...
░░ Subject: A start job for unit UNIT has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has begun execution.
░░
░░ The job identifier is 7.
Oct 12 13:37:33 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4096]: Finished Create User's Volatile Files and Directories.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 7.
Oct 12 13:37:33 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4096]: Listening on D-Bus User Message Bus Socket.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 12.
Oct 12 13:37:33 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4096]: Reached target Sockets.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 11.
Oct 12 13:37:33 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4096]: Reached target Basic System.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 2.
Oct 12 13:37:33 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4096]: Reached target Main User Target.
░░ Subject: A start job for unit UNIT has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit UNIT has finished successfully.
░░
░░ The job identifier is 1.
Oct 12 13:37:33 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[4096]: Startup finished in 52ms.
░░ Subject: User manager start-up is now complete
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The user manager instance for user 0 has been started. All services queued
░░ for starting have been started. Note that other services might still be starting
░░ up or be started at any later time.
░░
░░ Startup of the manager took 52202 microseconds.
Oct 12 13:37:33 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Started User Manager for UID 0.
░░ Subject: A start job for unit user@0.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit user@0.service has finished successfully.
░░
░░ The job identifier is 684.
Oct 12 13:37:33 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Started Session 3 of User root.
░░ Subject: A start job for unit session-3.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit session-3.scope has finished successfully.
░░
░░ The job identifier is 750.
Oct 12 13:37:33 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Started Session 4 of User root.
░░ Subject: A start job for unit session-4.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit session-4.scope has finished successfully.
░░
░░ The job identifier is 817.
Oct 12 13:37:33 ip-10-31-44-126.us-east-1.aws.redhat.com sshd[4089]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0)
Oct 12 13:37:34 ip-10-31-44-126.us-east-1.aws.redhat.com sshd[4090]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0)
Oct 12 13:37:34 ip-10-31-44-126.us-east-1.aws.redhat.com sshd[4106]: Received disconnect from 10.31.9.217 port 49064:11: disconnected by user
Oct 12 13:37:34 ip-10-31-44-126.us-east-1.aws.redhat.com sshd[4106]: Disconnected from user root 10.31.9.217 port 49064
Oct 12 13:37:34 ip-10-31-44-126.us-east-1.aws.redhat.com sshd[4090]: pam_unix(sshd:session): session closed for user root
Oct 12 13:37:34 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: session-4.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit session-4.scope has successfully entered the 'dead' state.
Oct 12 13:37:34 ip-10-31-44-126.us-east-1.aws.redhat.com systemd-logind[584]: Session 4 logged out. Waiting for processes to exit.
Oct 12 13:37:34 ip-10-31-44-126.us-east-1.aws.redhat.com systemd-logind[584]: Removed session 4.
░░ Subject: Session 4 has been terminated
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░
░░ A session with the ID 4 has been terminated.
Oct 12 13:38:11 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Starting Hostname Service...
░░ Subject: A start job for unit systemd-hostnamed.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit systemd-hostnamed.service has begun execution.
░░
░░ The job identifier is 885.
Oct 12 13:38:11 ip-10-31-44-126.us-east-1.aws.redhat.com systemd[1]: Started Hostname Service.
░░ Subject: A start job for unit systemd-hostnamed.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit systemd-hostnamed.service has finished successfully.
░░
░░ The job identifier is 885.
Oct 12 13:38:11 managed-node3 systemd-hostnamed[5392]: Hostname set to <managed-node3> (static)
Oct 12 13:38:11 managed-node3 NetworkManager[619]: <info>  [1728754691.1383] hostname: static hostname changed from "ip-10-31-44-126.us-east-1.aws.redhat.com" to "managed-node3"
Oct 12 13:38:11 managed-node3 systemd[1]: Starting Network Manager Script Dispatcher Service...
░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit NetworkManager-dispatcher.service has begun execution.
░░
░░ The job identifier is 949.
Oct 12 13:38:11 managed-node3 systemd[1]: Started Network Manager Script Dispatcher Service.
░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit NetworkManager-dispatcher.service has finished successfully.
░░
░░ The job identifier is 949.
Oct 12 13:38:21 managed-node3 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.
Oct 12 13:38:41 managed-node3 systemd[1]: systemd-hostnamed.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit systemd-hostnamed.service has successfully entered the 'dead' state.
Oct 12 13:38:55 managed-node3 sshd[6040]: Accepted publickey for root from 10.31.45.157 port 38818 ssh2: RSA SHA256:9j1blwt3wcrRiGYZQ7ZGu9axm3cDklH6/z4c+Ee8CzE
Oct 12 13:38:55 managed-node3 systemd-logind[584]: New session 6 of user root.
░░ Subject: A new session 6 has been created for user root
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░ Documentation: sd-login(3)
░░
░░ A new session with the ID 6 has been created for the user root.
░░
░░ The leading process of the session is 6040.
Oct 12 13:38:55 managed-node3 systemd[1]: Started Session 6 of User root.
░░ Subject: A start job for unit session-6.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit session-6.scope has finished successfully.
░░
░░ The job identifier is 1013.
Oct 12 13:38:55 managed-node3 sshd[6040]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0)
Oct 12 13:38:57 managed-node3 python3.9[6168]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 12 13:38:58 managed-node3 python3.9[6301]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 12 13:38:58 managed-node3 python3.9[6408]: ansible-ansible.legacy.dnf Invoked with name=['tang'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Oct 12 13:39:15 managed-node3 groupadd[6447]: group added to /etc/group: name=tang, GID=993
Oct 12 13:39:15 managed-node3 groupadd[6447]: group added to /etc/gshadow: name=tang
Oct 12 13:39:15 managed-node3 groupadd[6447]: new group: name=tang, GID=993
Oct 12 13:39:15 managed-node3 useradd[6454]: new user: name=tang, UID=996, GID=993, home=/var/cache/tang, shell=/sbin/nologin, from=none
Oct 12 13:39:17 managed-node3 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update.
░░ Subject: A start job for unit run-r75a7a634b5ce45179370f614046e5074.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit run-r75a7a634b5ce45179370f614046e5074.service has finished successfully.
░░
░░ The job identifier is 1080.
Oct 12 13:39:17 managed-node3 systemd[1]: Starting man-db-cache-update.service...
░░ Subject: A start job for unit man-db-cache-update.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit man-db-cache-update.service has begun execution.
░░
░░ The job identifier is 1143.
Oct 12 13:39:17 managed-node3 systemd[1]: Reloading.
Oct 12 13:39:17 managed-node3 systemd-rc-local-generator[6491]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 12 13:39:18 managed-node3 systemd[1]: Queuing reload/restart jobs for marked units…
Oct 12 13:39:20 managed-node3 systemd[1]: man-db-cache-update.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit man-db-cache-update.service has successfully entered the 'dead' state.
Oct 12 13:39:20 managed-node3 systemd[1]: Finished man-db-cache-update.service.
░░ Subject: A start job for unit man-db-cache-update.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit man-db-cache-update.service has finished successfully.
░░
░░ The job identifier is 1143.
Oct 12 13:39:20 managed-node3 systemd[1]: run-r75a7a634b5ce45179370f614046e5074.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit run-r75a7a634b5ce45179370f614046e5074.service has successfully entered the 'dead' state.
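The dnf invocation and the groupadd/useradd entries above are the package-installation step: installing tang pulls in the dedicated tang user and group via the package scriptlets. A minimal sketch of a task that would produce an invocation like the one logged (illustrative only; this is not the role's actual task file):

    - name: Ensure the tang package is installed
      ansible.builtin.dnf:
        name: tang
        state: present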
Oct 12 13:39:20 managed-node3 python3.9[6850]: ansible-fedora.linux_system_roles.nbde_server_tang Invoked with state=keys-created keygen=/usr/libexec/tangd-keygen keydir=/var/db/tang update=/usr/libexec/tangd-update cachedir=/var/cache/tang force=False keys_to_deploy_dir=None
Oct 12 13:39:21 managed-node3 python3.9[6970]: ansible-stat Invoked with path=/etc/systemd/system/tangd.socket.d follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 12 13:39:22 managed-node3 python3.9[7077]: ansible-file Invoked with path=/etc/systemd/system/tangd.socket.d state=absent mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 12 13:39:22 managed-node3 python3.9[7184]: ansible-ansible.legacy.systemd Invoked with name=tangd.socket state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 12 13:39:23 managed-node3 systemd[1]: Reloading.
Oct 12 13:39:23 managed-node3 systemd-rc-local-generator[7203]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 12 13:39:23 managed-node3 systemd[1]: Starting Tang Server socket...
░░ Subject: A start job for unit tangd.socket has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit tangd.socket has begun execution.
░░
░░ The job identifier is 1206.
Oct 12 13:39:23 managed-node3 systemd[1]: Listening on Tang Server socket.
░░ Subject: A start job for unit tangd.socket has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit tangd.socket has finished successfully.
░░
░░ The job identifier is 1206.
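The ansible-ansible.legacy.systemd entry above carries the exact parameters name=tangd.socket state=started enabled=True, which a task shaped like the following sketch would generate (task name is illustrative; the role's real task file is not shown in this log):

    - name: Ensure the tang socket unit is enabled and started
      ansible.builtin.systemd:
        name: tangd.socket
        state: started
        enabled: true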
Oct 12 13:39:23 managed-node3 python3.9[7326]: ansible-ansible.legacy.dnf Invoked with name=['tang'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Oct 12 13:39:25 managed-node3 python3.9[7434]: ansible-fedora.linux_system_roles.nbde_server_tang Invoked with state=keys-created keygen=/usr/libexec/tangd-keygen keydir=/var/db/tang update=/usr/libexec/tangd-update cachedir=/var/cache/tang force=False keys_to_deploy_dir=None
Oct 12 13:39:25 managed-node3 python3.9[7541]: ansible-find Invoked with paths=['/var/db/tang'] hidden=True patterns=['*.jwk', '.*.jwk'] read_whole_file=False file_type=file age_stamp=mtime recurse=False follow=False get_checksum=False use_regex=False exact_mode=True excludes=None contains=None age=None size=None depth=None mode=None encoding=None
Oct 12 13:39:26 managed-node3 python3.9[7648]: ansible-ansible.legacy.stat Invoked with path=/var/db/tang/0JxuIAui2-StXnveXjlKajUcK0Dzw5m9Xxe47YbQOm8.jwk follow=True get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 12 13:39:26 managed-node3 python3.9[7773]: ansible-ansible.legacy.stat Invoked with path=/var/db/tang/EM9sqtBqgeUPoKyeXKTHGyqph1LS5QJJUjoUzIitLps.jwk follow=True get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 12 13:39:28 managed-node3 python3.9[7898]: ansible-stat Invoked with path=/etc/systemd/system/tangd.socket.d follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 12 13:39:28 managed-node3 python3.9[8005]: ansible-file Invoked with path=/etc/systemd/system/tangd.socket.d state=absent mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 12 13:39:29 managed-node3 python3.9[8112]: ansible-ansible.legacy.systemd Invoked with name=tangd.socket state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 12 13:39:29 managed-node3 python3.9[8221]: ansible-find Invoked with paths=['/var/db/tang'] hidden=True patterns=['*.jwk', '.*.jwk'] read_whole_file=False file_type=file age_stamp=mtime recurse=False follow=False get_checksum=False use_regex=False exact_mode=True excludes=None contains=None age=None size=None depth=None mode=None encoding=None
Oct 12 13:39:30 managed-node3 python3.9[8328]: ansible-ansible.legacy.dnf Invoked with name=['tang'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Oct 12 13:39:31 managed-node3 python3.9[8436]: ansible-fedora.linux_system_roles.nbde_server_tang Invoked with state=keys-created keygen=/usr/libexec/tangd-keygen keydir=/var/db/tang update=/usr/libexec/tangd-update cachedir=/var/cache/tang force=False keys_to_deploy_dir=None
Oct 12 13:39:32 managed-node3 python3.9[8543]: ansible-find Invoked with paths=['/var/db/tang'] hidden=True patterns=['*.jwk', '.*.jwk'] read_whole_file=False file_type=file age_stamp=mtime recurse=False follow=False get_checksum=False use_regex=False exact_mode=True excludes=None contains=None age=None size=None depth=None mode=None encoding=None
Oct 12 13:39:32 managed-node3 python3.9[8650]: ansible-tempfile Invoked with state=directory suffix=nbde_server_keys prefix=ansible. path=None
Oct 12 13:39:34 managed-node3 python3.9[8757]: ansible-ansible.legacy.stat Invoked with path=/tmp/ansible.zsnhlxbenbde_server_keys/0JxuIAui2-StXnveXjlKajUcK0Dzw5m9Xxe47YbQOm8.jwk follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 12 13:39:34 managed-node3 python3.9[8842]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1728754773.833404-8060-76654130074876/.source.jwk dest=/tmp/ansible.zsnhlxbenbde_server_keys/ owner=tang group=tang mode=0400 _original_basename=0JxuIAui2-StXnveXjlKajUcK0Dzw5m9Xxe47YbQOm8.jwk follow=False checksum=4041108c3c9ca553bcc66ad3f23f99e6c543a9ed backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 12 13:39:34 managed-node3 python3.9[8949]: ansible-ansible.legacy.stat Invoked with path=/tmp/ansible.zsnhlxbenbde_server_keys/EM9sqtBqgeUPoKyeXKTHGyqph1LS5QJJUjoUzIitLps.jwk follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 12 13:39:35 managed-node3 python3.9[9034]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1728754774.5949957-8060-279742814632618/.source.jwk dest=/tmp/ansible.zsnhlxbenbde_server_keys/ owner=tang group=tang mode=0400 _original_basename=EM9sqtBqgeUPoKyeXKTHGyqph1LS5QJJUjoUzIitLps.jwk follow=False checksum=2abbec838c5016faad5cdf7c9924af86b1424377 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 12 13:39:35 managed-node3 python3.9[9141]: ansible-fedora.linux_system_roles.nbde_server_tang Invoked with state=keys-deployed keydir=/var/db/tang keygen=/usr/libexec/tangd-keygen keys_to_deploy_dir=/tmp/ansible.zsnhlxbenbde_server_keys update=/usr/libexec/tangd-update cachedir=/var/cache/tang force=False
Oct 12 13:39:36 managed-node3 python3.9[9248]: ansible-file Invoked with path=/tmp/ansible.zsnhlxbenbde_server_keys/ state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 12 13:39:36 managed-node3 python3.9[9355]: ansible-stat Invoked with path=/etc/systemd/system/tangd.socket.d follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
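The tempfile/copy/keys-deployed sequence above stages key files in a temporary directory with tang ownership and mode 0400 before the nbde_server_tang module deploys them. A sketch of the staging steps under those logged parameters (the register variable and source filename are illustrative, not taken from the role):

    - name: Create a temporary directory for the keys to deploy
      ansible.builtin.tempfile:
        state: directory
        prefix: ansible.
        suffix: nbde_server_keys
      register: __keys_tempdir   # illustrative variable name

    - name: Stage a key file with tang ownership and strict permissions
      ansible.builtin.copy:
        src: example.jwk         # illustrative; real files are named after their thumbprints
        dest: "{{ __keys_tempdir.path }}/"
        owner: tang
        group: tang
        mode: "0400"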
Oct 12 13:39:36 managed-node3 python3.9[9462]: ansible-file Invoked with path=/etc/systemd/system/tangd.socket.d state=absent mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 12 13:39:37 managed-node3 python3.9[9569]: ansible-ansible.legacy.systemd Invoked with name=tangd.socket state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 12 13:39:38 managed-node3 python3.9[9678]: ansible-find Invoked with paths=['/var/db/tang'] hidden=True patterns=['*.jwk', '.*.jwk'] read_whole_file=False file_type=file age_stamp=mtime recurse=False follow=False get_checksum=False use_regex=False exact_mode=True excludes=None contains=None age=None size=None depth=None mode=None encoding=None
Oct 12 13:39:38 managed-node3 python3.9[9785]: ansible-ansible.legacy.command Invoked with _raw_params=set -euxo pipefail if [ "true" = true ]; then rm -rf "/tmp/nbde_server_deploy_keysuqqal_4a" else rm -rf "/tmp/nbde_server_deploy_keysuqqal_4a"/* "/tmp/nbde_server_deploy_keysuqqal_4a"/.* || : fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 12 13:39:39 managed-node3 python3.9[9894]: ansible-ansible.legacy.command Invoked with _raw_params=set -euxo pipefail if [ "false" = true ]; then rm -rf "/var/db/tang" else rm -rf "/var/db/tang"/* "/var/db/tang"/.* || : fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 12 13:39:39 managed-node3 python3.9[10003]: ansible-ansible.legacy.command Invoked with _raw_params=set -euxo pipefail if [ "false" = true ]; then rm -rf "/var/cache/tang" else rm -rf "/var/cache/tang"/* "/var/cache/tang"/.* || : fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 12 13:39:41 managed-node3 python3.9[10147]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 12 13:39:42 managed-node3 python3.9[10280]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 12 13:39:43 managed-node3 python3.9[10387]: ansible-ansible.legacy.dnf Invoked with name=['tang'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Oct 12 13:39:44 managed-node3 python3.9[10495]: ansible-fedora.linux_system_roles.nbde_server_tang Invoked with state=keys-created keygen=/usr/libexec/tangd-keygen keydir=/var/db/tang update=/usr/libexec/tangd-update cachedir=/var/cache/tang force=False keys_to_deploy_dir=None
Oct 12 13:39:45 managed-node3 python3.9[10615]: ansible-ansible.legacy.command Invoked with _raw_params=set -euxo pipefail if [ "false" = true ]; then rm -rf "/var/db/tang" else rm -rf "/var/db/tang"/* "/var/db/tang"/.* || : fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 12 13:39:45 managed-node3 python3.9[10724]: ansible-ansible.legacy.command Invoked with _raw_params=set -euxo pipefail if [ "false" = true ]; then rm -rf "/var/cache/tang" else rm -rf "/var/cache/tang"/* "/var/cache/tang"/.* || : fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 12 13:39:46 managed-node3 python3.9[10833]: ansible-ansible.legacy.dnf Invoked with name=['tang'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Oct 12 13:39:47 managed-node3 python3.9[10941]: ansible-fedora.linux_system_roles.nbde_server_tang Invoked with state=keys-created keygen=/usr/libexec/tangd-keygen keydir=/var/db/tang update=/usr/libexec/tangd-update cachedir=/var/cache/tang force=False keys_to_deploy_dir=None
Oct 12 13:39:48 managed-node3 python3.9[11061]: ansible-ansible.legacy.command Invoked with _raw_params=set -euxo pipefail if [ "false" = true ]; then rm -rf "/var/db/tang" else rm -rf "/var/db/tang"/* "/var/db/tang"/.* || : fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 12 13:39:48 managed-node3 python3.9[11170]: ansible-ansible.legacy.command Invoked with _raw_params=set -euxo pipefail if [ "false" = true ]; then rm -rf "/var/cache/tang" else rm -rf "/var/cache/tang"/* "/var/cache/tang"/.* || : fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 12 13:39:49 managed-node3 python3.9[11279]: ansible-ansible.legacy.dnf Invoked with name=['tang'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Oct 12 13:39:50 managed-node3 python3.9[11387]: ansible-fedora.linux_system_roles.nbde_server_tang Invoked with state=keys-created keygen=/usr/libexec/tangd-keygen keydir=/var/db/tang update=/usr/libexec/tangd-update cachedir=/var/cache/tang force=False keys_to_deploy_dir=None
Oct 12 13:39:51 managed-node3 python3.9[11507]: ansible-ansible.legacy.command Invoked with _raw_params=set -euxo pipefail if [ "false" = true ]; then rm -rf "/var/db/tang" else rm -rf "/var/db/tang"/* "/var/db/tang"/.* || : fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 12 13:39:51 managed-node3 python3.9[11616]: ansible-ansible.legacy.command Invoked with _raw_params=set -euxo pipefail if [ "false" = true ]; then rm -rf "/var/cache/tang" else rm -rf "/var/cache/tang"/* "/var/cache/tang"/.* || : fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 12 13:39:52 managed-node3 python3.9[11725]: ansible-ansible.legacy.dnf Invoked with name=['tang'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Oct 12 13:39:53 managed-node3 python3.9[11833]: ansible-fedora.linux_system_roles.nbde_server_tang Invoked with state=keys-created keygen=/usr/libexec/tangd-keygen keydir=/var/db/tang update=/usr/libexec/tangd-update cachedir=/var/cache/tang force=False keys_to_deploy_dir=None
Oct 12 13:39:54 managed-node3 python3.9[11953]: ansible-ansible.legacy.command Invoked with _raw_params=set -euxo pipefail if [ "false" = true ]; then rm -rf "/var/db/tang" else rm -rf "/var/db/tang"/* "/var/db/tang"/.* || : fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 12 13:39:54 managed-node3 python3.9[12062]: ansible-ansible.legacy.command Invoked with _raw_params=set -euxo pipefail if [ "false" = true ]; then rm -rf "/var/cache/tang" else rm -rf "/var/cache/tang"/* "/var/cache/tang"/.* || : fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 12 13:39:55 managed-node3 python3.9[12171]: ansible-ansible.legacy.dnf Invoked with name=['tang'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Oct 12 13:39:56 managed-node3 python3.9[12279]: ansible-fedora.linux_system_roles.nbde_server_tang Invoked with state=keys-created keygen=/usr/libexec/tangd-keygen keydir=/var/db/tang update=/usr/libexec/tangd-update cachedir=/var/cache/tang force=False keys_to_deploy_dir=None
Oct 12 13:39:57 managed-node3 python3.9[12399]: ansible-ansible.legacy.command Invoked with _raw_params=set -euxo pipefail if [ "false" = true ]; then rm -rf "/var/db/tang" else rm -rf "/var/db/tang"/* "/var/db/tang"/.* || : fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 12 13:39:57 managed-node3 python3.9[12508]: ansible-ansible.legacy.command Invoked with _raw_params=set -euxo pipefail if [ "false" = true ]; then rm -rf "/var/cache/tang" else rm -rf "/var/cache/tang"/* "/var/cache/tang"/.* || : fi _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Oct 12 13:39:59 managed-node3 python3.9[12652]: ansible-file Invoked with path=/etc/systemd/system/tangd.socket.d state=directory mode=0775 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 12 13:40:00 managed-node3 python3.9[12759]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system/tangd.socket.d/override2.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
Oct 12 13:40:00 managed-node3 python3.9[12844]: ansible-ansible.legacy.copy Invoked with dest=/etc/systemd/system/tangd.socket.d/override2.conf mode=0664 src=/root/.ansible/tmp/ansible-tmp-1728754799.7418828-10128-106422745829600/.source.conf _original_basename=.nq7c1bp6 follow=False checksum=05987691cc309e84627f31fa0d1680a3b3b2c4b2 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 12 13:40:01 managed-node3 python3.9[12951]: ansible-setup Invoked with gather_subset=['!all', '!min', 'distribution', 'distribution_major_version', 'distribution_version', 'os_family'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 12 13:40:01 managed-node3 python3.9[13060]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 12 13:40:02 managed-node3 python3.9[13167]: ansible-ansible.legacy.setup Invoked with filter=['ansible_pkg_mgr'] gather_subset=['!all'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Oct 12 13:40:02 managed-node3 python3.9[13223]: ansible-ansible.legacy.dnf Invoked with name=['tang'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Oct 12 13:40:04 managed-node3 python3.9[13331]: ansible-fedora.linux_system_roles.nbde_server_tang Invoked with state=keys-created keygen=/usr/libexec/tangd-keygen keydir=/var/db/tang update=/usr/libexec/tangd-update cachedir=/var/cache/tang force=False keys_to_deploy_dir=None
Oct 12 13:40:04 managed-node3 python3.9[13451]: ansible-stat Invoked with path=/etc/systemd/system/tangd.socket.d follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
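The file and copy invocations at 13:39:59-13:40:00 recreate the shared drop-in directory and place override2.conf inside it, which is the setup this test exercises. A sketch with the logged paths and modes (the file's content is not visible in the log, so the source file here is illustrative):

    - name: Recreate the shared tangd.socket.d drop-in directory
      ansible.builtin.file:
        path: /etc/systemd/system/tangd.socket.d
        state: directory
        mode: "0775"

    - name: Place a customization file in the shared directory
      ansible.builtin.copy:
        src: override2.conf      # illustrative source; actual content not shown in the log
        dest: /etc/systemd/system/tangd.socket.d/override2.conf
        mode: "0664"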
Oct 12 13:40:05 managed-node3 python3.9[13560]: ansible-find Invoked with paths=['/etc/systemd/system/tangd.socket.d'] file_type=any hidden=True excludes=['^override.conf$'] use_regex=True patterns=[] read_whole_file=False age_stamp=mtime recurse=False follow=False get_checksum=False exact_mode=True contains=None age=None size=None depth=None mode=None encoding=None
Oct 12 13:40:05 managed-node3 python3.9[13667]: ansible-file Invoked with path=/etc/systemd/system/tangd.socket.d state=directory mode=0775 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 12 13:40:06 managed-node3 python3.9[13774]: ansible-ansible.legacy.setup Invoked with gather_subset=['!all'] filter=['ansible_service_mgr'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Oct 12 13:40:06 managed-node3 python3.9[13830]: ansible-ansible.legacy.systemd Invoked with name=tangd.socket state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 12 13:40:07 managed-node3 python3.9[13939]: ansible-stat Invoked with path=/etc/systemd/system/tangd.socket.d follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 12 13:40:07 managed-node3 python3.9[14048]: ansible-stat Invoked with path=/etc/systemd/system/tangd.socket.d/override2.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 12 13:40:08 managed-node3 python3.9[14157]: ansible-ansible.legacy.setup Invoked with filter=['ansible_pkg_mgr'] gather_subset=['!all'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Oct 12 13:40:08 managed-node3 python3.9[14213]: ansible-ansible.legacy.dnf Invoked with name=['tang'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Oct 12 13:40:10 managed-node3 python3.9[14321]: ansible-fedora.linux_system_roles.nbde_server_tang Invoked with state=keys-created keygen=/usr/libexec/tangd-keygen keydir=/var/db/tang update=/usr/libexec/tangd-update cachedir=/var/cache/tang force=False keys_to_deploy_dir=None
Oct 12 13:40:10 managed-node3 python3.9[14428]: ansible-setup Invoked with gather_subset=['!all', '!min', 'distribution', 'distribution_major_version', 'python_version'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 12 13:40:11 managed-node3 python3.9[14538]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 12 13:40:11 managed-node3 python3.9[14645]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 12 13:40:11 managed-node3 python3.9[14752]: ansible-ansible.legacy.setup Invoked with filter=['ansible_pkg_mgr'] gather_subset=['!all'] gather_timeout=10 fact_path=/etc/ansible/facts.d
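The ansible-find call at 13:40:05 is how the role decides whether tangd.socket.d is shared: it searches for anything, hidden files included, other than the role's own override.conf, and the directory is only removed later if nothing else lives there. A sketch of that check; the register variable name is illustrative:

    - name: Find files in the drop-in directory other than the role's own
      ansible.builtin.find:
        paths:
          - /etc/systemd/system/tangd.socket.d
        file_type: any
        hidden: true
        use_regex: true
        excludes:
          - ^override.conf$
      register: __drop_in_foreign_files

    - name: Remove the drop-in directory only when the role is its sole user
      ansible.builtin.file:
        path: /etc/systemd/system/tangd.socket.d
        state: absent
      when: __drop_in_foreign_files.matched == 0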
Oct 12 13:40:12 managed-node3 python3.9[14808]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Oct 12 13:40:13 managed-node3 python3.9[14916]: ansible-ansible.legacy.setup Invoked with filter=['ansible_pkg_mgr'] gather_subset=['!all'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Oct 12 13:40:13 managed-node3 python3.9[14972]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Oct 12 13:40:15 managed-node3 dbus-broker-launch[577]: Noticed file-system modification, trigger reload.
░░ Subject: A configuration directory was written to
░░ Defined-By: dbus-broker
░░ Support: https://groups.google.com/forum/#!forum/bus1-devel
░░
░░ A write was detected to one of the directories containing D-Bus configuration
░░ files, triggering a configuration reload.
░░
░░ This functionality exists for backwards compatibility to pick up changes to
░░ D-Bus configuration without an explicit reload request. Typically when
░░ installing or removing third-party software causes D-Bus configuration files
░░ to be added or removed.
░░
░░ It is worth noting that this may cause partial configuration to be loaded in
░░ case dispatching this notification races with the writing of the configuration
░░ files. However, a future notification will then cause the configuration to be
░░ reloaded again.
Oct 12 13:40:15 managed-node3 dbus-broker-launch[577]: Noticed file-system modification, trigger reload.
░░ Subject: A configuration directory was written to
░░ Defined-By: dbus-broker
░░ Support: https://groups.google.com/forum/#!forum/bus1-devel
░░
░░ A write was detected to one of the directories containing D-Bus configuration
░░ files, triggering a configuration reload.
░░
░░ This functionality exists for backwards compatibility to pick up changes to
░░ D-Bus configuration without an explicit reload request. Typically when
░░ installing or removing third-party software causes D-Bus configuration files
░░ to be added or removed.
░░
░░ It is worth noting that this may cause partial configuration to be loaded in
░░ case dispatching this notification races with the writing of the configuration
░░ files. However, a future notification will then cause the configuration to be
░░ reloaded again.
Oct 12 13:40:15 managed-node3 systemd[1]: Started /usr/bin/systemctl start man-db-cache-update.
░░ Subject: A start job for unit run-rb57719591f81410599f6b79eb33a4573.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit run-rb57719591f81410599f6b79eb33a4573.service has finished successfully.
░░
░░ The job identifier is 1271.
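The dnf transactions at 13:40:12 and 13:40:13 pull in the SELinux management stack (python3-libselinux, python3-policycoreutils, policycoreutils-python-utils) needed before any semanage operation can run; the D-Bus and man-db churn above is fallout from that package install. A standalone equivalent would be roughly:

    - name: Ensure SELinux management tooling is present
      ansible.builtin.dnf:
        name:
          - python3-libselinux
          - python3-policycoreutils
          - policycoreutils-python-utils
        state: present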
Oct 12 13:40:15 managed-node3 systemd[1]: Starting man-db-cache-update.service...
░░ Subject: A start job for unit man-db-cache-update.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit man-db-cache-update.service has begun execution.
░░
░░ The job identifier is 1334.
Oct 12 13:40:15 managed-node3 systemd[1]: man-db-cache-update.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit man-db-cache-update.service has successfully entered the 'dead' state.
Oct 12 13:40:15 managed-node3 systemd[1]: Finished man-db-cache-update.service.
░░ Subject: A start job for unit man-db-cache-update.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit man-db-cache-update.service has finished successfully.
░░
░░ The job identifier is 1334.
Oct 12 13:40:15 managed-node3 systemd[1]: run-rb57719591f81410599f6b79eb33a4573.service: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit run-rb57719591f81410599f6b79eb33a4573.service has successfully entered the 'dead' state.
Oct 12 13:40:16 managed-node3 python3.9[15382]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Oct 12 13:40:18 managed-node3 python3.9[15515]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['7500'] proto=tcp setype=tangd_port_t state=present local=True ignore_selinux_state=False reload=True
Oct 12 13:40:22 managed-node3 kernel: SELinux: Converting 390 SID table entries...
Oct 12 13:40:22 managed-node3 kernel: SELinux: policy capability network_peer_controls=1
Oct 12 13:40:22 managed-node3 kernel: SELinux: policy capability open_perms=1
Oct 12 13:40:22 managed-node3 kernel: SELinux: policy capability extended_socket_class=1
Oct 12 13:40:22 managed-node3 kernel: SELinux: policy capability always_check_network=0
Oct 12 13:40:22 managed-node3 kernel: SELinux: policy capability cgroup_seclabel=1
Oct 12 13:40:22 managed-node3 kernel: SELinux: policy capability nnp_nosuid_transition=1
Oct 12 13:40:22 managed-node3 kernel: SELinux: policy capability genfs_seclabel_symlinks=1
Oct 12 13:40:22 managed-node3 python3.9[15626]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked
Oct 12 13:40:26 managed-node3 python3.9[15733]: ansible-stat Invoked with path=/etc/systemd/system/tangd.socket.d follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 12 13:40:26 managed-node3 python3.9[15842]: ansible-file Invoked with path=/etc/systemd/system/tangd.socket.d state=directory mode=0775 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 12 13:40:26 managed-node3 python3.9[15949]: ansible-ansible.legacy.stat Invoked with path=/etc/systemd/system/tangd.socket.d/override.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True
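local_seport at 13:40:18 labels TCP port 7500 with tangd_port_t so that SELinux policy lets tangd bind somewhere other than its default port; the kernel messages at 13:40:22 are the policy store being rebuilt and reloaded as a result. Outside the role, which ships its own local_seport module, the same labeling is conventionally done with community.general.seport; a sketch:

    - name: Allow tangd to bind to TCP port 7500 under SELinux
      community.general.seport:
        ports: 7500
        proto: tcp
        setype: tangd_port_t
        state: present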
Oct 12 13:40:27 managed-node3 python3.9[16034]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1728754826.5752347-11081-278147775395579/.source.conf dest=/etc/systemd/system/tangd.socket.d/override.conf backup=True mode=0644 follow=False _original_basename=tangd_socket_override.conf.j2 checksum=cab519df8c21e60fd06ac780e2c7bd41ad441042 force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 12 13:40:27 managed-node3 python3.9[16141]: ansible-setup Invoked with gather_subset=['!all', '!min', 'python_version', 'service_mgr'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d
Oct 12 13:40:28 managed-node3 python3.9[16251]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 12 13:40:28 managed-node3 python3.9[16358]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 12 13:40:29 managed-node3 python3.9[16465]: ansible-ansible.legacy.setup Invoked with filter=['ansible_pkg_mgr'] gather_subset=['!all'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Oct 12 13:40:29 managed-node3 python3.9[16521]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Oct 12 13:40:30 managed-node3 python3.9[16629]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None
Oct 12 13:40:31 managed-node3 dbus-broker-launch[578]: avc: op=load_policy lsm=selinux seqno=2 res=1
Oct 12 13:40:31 managed-node3 python3.9[16738]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 12 13:40:31 managed-node3 systemd[1]: Reloading.
Oct 12 13:40:31 managed-node3 systemd-rc-local-generator[16757]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 12 13:40:31 managed-node3 systemd[1]: tangd.socket: Socket unit configuration has changed while unit has been running, no open socket file descriptor left. The socket unit is not functional until restarted.
Oct 12 13:40:31 managed-node3 systemd[1]: Starting firewalld - dynamic firewall daemon...
░░ Subject: A start job for unit firewalld.service has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit firewalld.service has begun execution.
░░
░░ The job identifier is 1397.
Oct 12 13:40:32 managed-node3 systemd[1]: Started firewalld - dynamic firewall daemon.
░░ Subject: A start job for unit firewalld.service has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit firewalld.service has finished successfully.
░░
░░ The job identifier is 1397.
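The override.conf written at 13:40:27 is rendered from the role's tangd_socket_override.conf.j2 template and is what actually moves tangd onto port 7500. The rendered contents are not in the log (only the checksum is), but a systemd socket drop-in for a custom port conventionally clears the inherited ListenStream before setting a new one. A sketch of a task writing such a drop-in, with an assumed file body:

    - name: Write a tangd.socket drop-in for a custom port (sketch)
      ansible.builtin.copy:
        dest: /etc/systemd/system/tangd.socket.d/override.conf
        mode: "0644"
        backup: true
        content: |
          # assumed drop-in body; the role's template output is not logged
          [Socket]
          ListenStream=
          ListenStream=7500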
Oct 12 13:40:32 managed-node3 kernel: Warning: Unmaintained driver is detected: ip_set
Oct 12 13:40:33 managed-node3 python3.9[16892]: ansible-fedora.linux_system_roles.firewall_lib Invoked with zone=public port=['7500/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None set_default_zone=None ipset=None ipset_type=None description=None short=None
Oct 12 13:40:34 managed-node3 python3.9[17030]: ansible-systemd Invoked with daemon_reload=True daemon_reexec=False scope=system no_block=False name=None state=None enabled=None force=None masked=None
Oct 12 13:40:34 managed-node3 systemd[1]: Reloading.
Oct 12 13:40:34 managed-node3 systemd-rc-local-generator[17047]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 12 13:40:34 managed-node3 systemd[1]: tangd.socket: Socket unit configuration has changed while unit has been running, no open socket file descriptor left. The socket unit is not functional until restarted.
Oct 12 13:40:34 managed-node3 python3.9[17165]: ansible-ansible.legacy.systemd Invoked with name=tangd.socket state=restarted enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 12 13:40:34 managed-node3 systemd[1]: tangd.socket: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit tangd.socket has successfully entered the 'dead' state.
Oct 12 13:40:34 managed-node3 systemd[1]: Closed Tang Server socket.
░░ Subject: A stop job for unit tangd.socket has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit tangd.socket has finished.
░░
░░ The job identifier is 1466 and the job result is done.
Oct 12 13:40:34 managed-node3 systemd[1]: Stopping Tang Server socket...
░░ Subject: A stop job for unit tangd.socket has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit tangd.socket has begun execution.
░░
░░ The job identifier is 1466.
Oct 12 13:40:34 managed-node3 systemd[1]: Starting Tang Server socket...
░░ Subject: A start job for unit tangd.socket has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit tangd.socket has begun execution.
░░
░░ The job identifier is 1466.
Oct 12 13:40:35 managed-node3 systemd[1]: Listening on Tang Server socket.
░░ Subject: A start job for unit tangd.socket has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit tangd.socket has finished successfully.
░░
░░ The job identifier is 1466.
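firewall_lib at 13:40:33 opens 7500/tcp in the public zone both permanently and for the running firewalld instance. A standalone equivalent, assuming the ansible.posix collection rather than the role's bundled module:

    - name: Open the tang port in firewalld
      ansible.posix.firewalld:
        port: 7500/tcp
        zone: public
        permanent: true
        immediate: true
        state: enabled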
Oct 12 13:40:35 managed-node3 python3.9[17279]: ansible-stat Invoked with path=/etc/systemd/system/tangd.socket.d follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 12 13:40:35 managed-node3 python3.9[17388]: ansible-stat Invoked with path=/etc/systemd/system/tangd.socket.d/override2.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 12 13:40:36 managed-node3 python3.9[17497]: ansible-slurp Invoked with path=/etc/systemd/system/tangd.socket.d/override.conf src=/etc/systemd/system/tangd.socket.d/override.conf
Oct 12 13:40:36 managed-node3 python3.9[17604]: ansible-file Invoked with path=/etc/systemd/system/tangd.socket.d/override2.conf state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 12 13:40:37 managed-node3 python3.9[17711]: ansible-ansible.legacy.setup Invoked with filter=['ansible_pkg_mgr'] gather_subset=['!all'] gather_timeout=10 fact_path=/etc/ansible/facts.d
Oct 12 13:40:37 managed-node3 python3.9[17767]: ansible-ansible.legacy.dnf Invoked with name=['tang'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None
Oct 12 13:40:38 managed-node3 python3.9[17875]: ansible-fedora.linux_system_roles.nbde_server_tang Invoked with state=keys-created keygen=/usr/libexec/tangd-keygen keydir=/var/db/tang update=/usr/libexec/tangd-update cachedir=/var/cache/tang force=False keys_to_deploy_dir=None
Oct 12 13:40:39 managed-node3 python3.9[17982]: ansible-stat Invoked with path=/etc/systemd/system/tangd.socket.d follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 12 13:40:39 managed-node3 python3.9[18091]: ansible-find Invoked with paths=['/etc/systemd/system/tangd.socket.d'] file_type=any hidden=True excludes=['^override.conf$'] use_regex=True patterns=[] read_whole_file=False age_stamp=mtime recurse=False follow=False get_checksum=False exact_mode=True contains=None age=None size=None depth=None mode=None encoding=None
Oct 12 13:40:39 managed-node3 python3.9[18198]: ansible-file Invoked with path=/etc/systemd/system/tangd.socket.d state=absent mode=0775 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Oct 12 13:40:40 managed-node3 python3.9[18305]: ansible-systemd Invoked with daemon_reload=True daemon_reexec=False scope=system no_block=False name=None state=None enabled=None force=None masked=None
Oct 12 13:40:40 managed-node3 systemd[1]: Reloading.
Oct 12 13:40:40 managed-node3 systemd-rc-local-generator[18322]: /etc/rc.d/rc.local is not marked executable, skipping.
Oct 12 13:40:40 managed-node3 systemd[1]: tangd.socket: Socket unit configuration has changed while unit has been running, no open socket file descriptor left. The socket unit is not functional until restarted.
Oct 12 13:40:41 managed-node3 python3.9[18440]: ansible-ansible.legacy.systemd Invoked with name=tangd.socket state=restarted enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None
Oct 12 13:40:41 managed-node3 systemd[1]: tangd.socket: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit tangd.socket has successfully entered the 'dead' state.
Oct 12 13:40:41 managed-node3 systemd[1]: Closed Tang Server socket.
░░ Subject: A stop job for unit tangd.socket has finished
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit tangd.socket has finished.
░░
░░ The job identifier is 1529 and the job result is done.
Oct 12 13:40:41 managed-node3 systemd[1]: Stopping Tang Server socket...
░░ Subject: A stop job for unit tangd.socket has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A stop job for unit tangd.socket has begun execution.
░░
░░ The job identifier is 1529.
Oct 12 13:40:41 managed-node3 systemd[1]: Starting Tang Server socket...
░░ Subject: A start job for unit tangd.socket has begun execution
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit tangd.socket has begun execution.
░░
░░ The job identifier is 1529.
Oct 12 13:40:41 managed-node3 systemd[1]: Listening on Tang Server socket.
░░ Subject: A start job for unit tangd.socket has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit tangd.socket has finished successfully.
░░
░░ The job identifier is 1529.
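Both reload passes above log the same systemd warning: once a socket unit's configuration changes while the unit is running, its open file descriptors are gone and only a restart makes it functional again. That is why each drop-in change in this run is followed by a daemon reload and an explicit restart of tangd.socket, as in this sketch:

    - name: Reload systemd so changed unit files are picked up
      ansible.builtin.systemd:
        daemon_reload: true

    - name: Restart the socket so it binds with the new configuration
      ansible.builtin.systemd:
        name: tangd.socket
        state: restarted
        enabled: true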
Oct 12 13:40:41 managed-node3 python3.9[18554]: ansible-stat Invoked with path=/etc/systemd/system/tangd.socket.d follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Oct 12 13:40:42 managed-node3 python3.9[18661]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None

TASK [Cleanup] *****************************************************************
task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml:100
Saturday 12 October 2024 13:40:42 -0400 (0:00:00.515) 0:00:43.195 ******
included: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tasks/cleanup.yml for managed-node3

TASK [Remove control node files/directories] ***********************************
task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tasks/cleanup.yml:2
Saturday 12 October 2024 13:40:42 -0400 (0:00:00.047) 0:00:43.243 ******
skipping: [managed-node3] => {
    "changed": false,
    "false_condition": "nbde_server_keys_dir | d(\"\") is match(\"^/\")",
    "skip_reason": "Conditional result was False"
}

TASK [Remove managed node files/directories] ***********************************
task path: /tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tasks/cleanup.yml:9
Saturday 12 October 2024 13:40:42 -0400 (0:00:00.047) 0:00:43.290 ******
skipping: [managed-node3] => (item={'path': '', 'remove_dir': 'true'}) => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "item.path is match(\"^/\")",
    "item": {
        "path": "",
        "remove_dir": "true"
    },
    "skip_reason": "Conditional result was False"
}
changed: [managed-node3] => (item={'path': '/var/db/tang', 'remove_dir': 'false'}) => {
    "ansible_loop_var": "item",
    "changed": true,
    "cmd": "set -euxo pipefail\nif [ \"false\" = true ]; then\n rm -rf \"/var/db/tang\"\nelse\n rm -rf \"/var/db/tang\"/* \"/var/db/tang\"/.* || :\nfi\n",
    "delta": "0:00:00.004738",
    "end": "2024-10-12 13:40:42.592523",
    "item": {
        "path": "/var/db/tang",
        "remove_dir": "false"
    },
    "rc": 0,
    "start": "2024-10-12 13:40:42.587785"
}

STDERR:

+ '[' false = true ']'
+ rm -rf /var/db/tang/OTrRHus4nesA1CRzaym4vd4_1f-SO68Is1O-w2I4yg0.jwk /var/db/tang/qa2YDoEOpOMKMBd5KItKdvI_WhA41w88UO2WI-fNZ58.jwk /var/db/tang/. /var/db/tang/..
rm: refusing to remove '.' or '..' directory: skipping '/var/db/tang/.'
rm: refusing to remove '.' or '..' directory: skipping '/var/db/tang/..'
+ :
changed: [managed-node3] => (item={'path': '/var/cache/tang', 'remove_dir': 'false'}) => {
    "ansible_loop_var": "item",
    "changed": true,
    "cmd": "set -euxo pipefail\nif [ \"false\" = true ]; then\n rm -rf \"/var/cache/tang\"\nelse\n rm -rf \"/var/cache/tang\"/* \"/var/cache/tang\"/.* || :\nfi\n",
    "delta": "0:00:00.004487",
    "end": "2024-10-12 13:40:42.898507",
    "item": {
        "path": "/var/cache/tang",
        "remove_dir": "false"
    },
    "rc": 0,
    "start": "2024-10-12 13:40:42.894020"
}

STDERR:

+ '[' false = true ']'
+ rm -rf '/var/cache/tang/*' '/var/cache/tang/.*'

PLAY RECAP *********************************************************************
managed-node3              : ok=83   changed=13   unreachable=0    failed=0    skipped=55   rescued=0    ignored=0

Saturday 12 October 2024 13:40:42 -0400 (0:00:00.692) 0:00:43.983 ******
===============================================================================
fedora.linux_system_roles.selinux : Set an SELinux label on a port ------ 5.65s
/tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:87
fedora.linux_system_roles.selinux : Get SELinux modules facts ----------- 3.40s
/tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/main.yml:112
fedora.linux_system_roles.selinux : Install SELinux tool semanage ------- 2.33s
/tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:58
fedora.linux_system_roles.nbde_server : Ensure tang is installed -------- 1.81s
/tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:2
fedora.linux_system_roles.selinux : Install SELinux python3 tools ------- 1.60s
/tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:35
fedora.linux_system_roles.firewall : Install firewalld ------------------ 1.57s
/tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31
fedora.linux_system_roles.nbde_server : Ensure tang is installed -------- 1.57s
/tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:2
fedora.linux_system_roles.nbde_server : Ensure tang is installed -------- 1.55s
/tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:2
fedora.linux_system_roles.firewall : Enable and start firewalld service --- 1.35s
/tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28
fedora.linux_system_roles.nbde_server : Ensure required services are enabled and at the right state --- 1.09s
/tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:39
fedora.linux_system_roles.firewall : Configure firewall ----------------- 1.03s
/tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71
Create a customization systemd file ------------------------------------- 0.89s
/tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml:20
Check custom file ------------------------------------------------------- 0.87s
/tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml:39
fedora.linux_system_roles.nbde_server : Ensure ansible_facts used by role --- 0.77s
/tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/set_vars.yml:2
fedora.linux_system_roles.selinux : Refresh facts ----------------------- 0.73s
/tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/selinux/tasks/ensure_selinux_packages.yml:89
Remove managed node files/directories ----------------------------------- 0.69s
/tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tasks/cleanup.yml:9
Create the tangd.socket.d directory ------------------------------------- 0.69s
/tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/tests/nbde_server/tests_share_system_dir.yml:14
fedora.linux_system_roles.nbde_server : Creates the file with the port entry that we want tangd to listen to --- 0.67s
/tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/tangd-custom-port.yml:44
fedora.linux_system_roles.nbde_server : Reload the daemons so the new changes take effect --- 0.65s
/tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:34
fedora.linux_system_roles.nbde_server : Reload the daemons so the new changes take effect --- 0.64s
/tmp/collections-jiT/ansible_collections/fedora/linux_system_roles/roles/nbde_server/tasks/main-tang.yml:34