● compute-0
    State: running
    Units: 479 loaded (incl. loaded aliases)
     Jobs: 0 queued
   Failed: 0 units
    Since: Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
  systemd: 252-64.el9
   CGroup: /
           ├─267250 turbostat --debug sleep 10
           ├─267256 sleep 10
           ├─init.scope
           │ └─1 /usr/lib/systemd/systemd --switched-root --system --deserialize 31
           ├─machine.slice
           │ ├─libpod-573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b.scope
           │ │ └─container
           │ │   ├─104655 dumb-init --single-child -- kolla_start
           │ │   ├─104658 "neutron-ovn-metadata-agent (/usr/bin/python3 /usr/bin/neutron-ovn-metadata-agent)"
           │ │   ├─104880 "neutron-ovn-metadata-agent (/usr/bin/python3 /usr/bin/neutron-ovn-metadata-agent)"
           │ │   ├─104920 /usr/bin/python3 /bin/privsep-helper --config-file /etc/neutron/neutron.conf --config-dir /etc/neutron.conf.d --privsep_context neutron.privileged.namespace_cmd --privsep_sock_path /tmp/tmpp73u3fin/privsep.sock
           │ │   ├─212547 /usr/bin/python3 /bin/privsep-helper --config-file /etc/neutron/neutron.conf --config-dir /etc/neutron.conf.d --privsep_context neutron.privileged.default --privsep_sock_path /tmp/tmpr5q8om25/privsep.sock
           │ │   └─212743 /usr/bin/python3 /bin/privsep-helper --config-file /etc/neutron/neutron.conf --config-dir /etc/neutron.conf.d --privsep_context neutron.privileged.link_cmd --privsep_sock_path /tmp/tmp3la6aon7/privsep.sock
           │ ├─libpod-60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5.scope
           │ │ └─container
           │ │   ├─192801 dumb-init --single-child -- kolla_start
           │ │   ├─192804 "ceilometer-polling: master process [/usr/bin/ceilometer-polling --polling-namespaces compute --logfile /dev/stdout]"
           │ │   └─192931 "ceilometer-polling: AgentManager worker(0)"
           │ ├─libpod-60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97.scope
           │ │ └─container
           │ │   ├─95418 dumb-init --single-child -- kolla_start
           │ │   └─95421 /usr/bin/ovn-controller --pidfile unix:/run/openvswitch/db.sock -p /etc/pki/tls/private/ovndb.key -c /etc/pki/tls/certs/ovndb.crt -C /etc/pki/tls/certs/ovndbca.crt
           │ ├─libpod-8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a.scope
           │ │ └─container
           │ │   └─202050 /app/openstack-network-exporter
           │ ├─libpod-925ff7e3b4039e4c5d83bde5f3c68c16c1a695e578352baba5cb792427c3386e.scope
           │ │ └─container
           │ │   ├─183117 dumb-init --single-child -- kolla_start
           │ │   ├─183119 /usr/bin/python3 /usr/bin/nova-compute
           │ │   ├─212621 /usr/bin/python3 /bin/privsep-helper --config-file /etc/nova/nova.conf --config-file /etc/nova/nova-compute.conf --config-dir /etc/nova/nova.conf.d --privsep_context nova.privsep.sys_admin_pctxt --privsep_sock_path /tmp/tmp5s1ioym_/privsep.sock
           │ │   └─212642 /usr/bin/python3 /bin/privsep-helper --config-file /etc/nova/nova.conf --config-file /etc/nova/nova-compute.conf --config-dir /etc/nova/nova.conf.d --privsep_context vif_plug_ovs.privsep.vif_plug --privsep_sock_path /tmp/tmpmo51pq8n/privsep.sock
           │ ├─libpod-ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735.scope
           │ │ └─container
           │ │   └─198927 /bin/podman_exporter --web.config.file=/etc/podman_exporter/podman_exporter.yaml
           │ ├─libpod-e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0.scope
           │ │ └─container
           │ │   └─195798 /bin/node_exporter --web.config.file=/etc/node_exporter/node_exporter.yaml --web.disable-exporter-metrics --collector.systemd "--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service" --no-collector.dmi --no-collector.entropy --no-collector.thermal_zone --no-collector.time --no-collector.timex --no-collector.uname --no-collector.stat --no-collector.hwmon --no-collector.os --no-collector.selinux --no-collector.textfile --no-collector.powersupplyclass --no-collector.pressure --no-collector.rapl
           │ ├─machine-qemu\x2d10\x2dinstance\x2d0000000f.scope
           │ │ └─libvirt
           │ │   └─216930 /usr/libexec/qemu-kvm -name guest=instance-0000000f,debug-threads=on -S -object "{\"qom-type\":\"secret\",\"id\":\"masterKey0\",\"format\":\"raw\",\"file\":\"/var/lib/libvirt/qemu/domain-10-instance-0000000f/master-key.aes\"}" -machine pc-q35-rhel9.8.0,usb=off,dump-guest-core=off,memory-backend=pc.ram,hpet=off,acpi=on -accel kvm -cpu EPYC-Rome,x2apic=on,tsc-deadline=on,hypervisor=on,tsc-adjust=on,spec-ctrl=on,stibp=on,ssbd=on,cmp-legacy=on,overflow-recov=on,succor=on,ibrs=on,amd-ssbd=on,virt-ssbd=on,lbrv=on,tsc-scale=on,vmcb-clean=on,flushbyasid=on,pause-filter=on,pfthreshold=on,svme-addr-chk=on,lfence-always-serializing=on,xsaves=off -m size=1048576k -object "{\"qom-type\":\"memory-backend-ram\",\"id\":\"pc.ram\",\"size\":1073741824}" -overcommit mem-lock=off -smp 1,sockets=1,dies=1,clusters=1,cores=1,threads=1 -uuid 69c40217-ae22-4704-ad01-f2ca06c42d58 -smbios "type=1,manufacturer=RDO,product=OpenStack Compute,version=27.5.2-0.20260127144738.eaa65f0.el9,serial=69c40217-ae22-4704-ad01-f2ca06c42d58,uuid=69c40217-ae22-4704-ad01-f2ca06c42d58,family=Virtual Machine" -no-user-config -nodefaults -chardev socket,id=charmonitor,fd=33,server=on,wait=off -mon chardev=charmonitor,id=monitor,mode=control -rtc base=utc,driftfix=slew -global kvm-pit.lost_tick_policy=delay -no-shutdown -boot strict=on -device "{\"driver\":\"pcie-root-port\",\"port\":16,\"chassis\":1,\"id\":\"pci.1\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":17,\"chassis\":2,\"id\":\"pci.2\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":18,\"chassis\":3,\"id\":\"pci.3\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":19,\"chassis\":4,\"id\":\"pci.4\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":20,\"chassis\":5,\"id\":\"pci.5\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x4\"}" -device 
"{\"driver\":\"pcie-root-port\",\"port\":21,\"chassis\":6,\"id\":\"pci.6\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":22,\"chassis\":7,\"id\":\"pci.7\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":23,\"chassis\":8,\"id\":\"pci.8\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":24,\"chassis\":9,\"id\":\"pci.9\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":25,\"chassis\":10,\"id\":\"pci.10\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":26,\"chassis\":11,\"id\":\"pci.11\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":27,\"chassis\":12,\"id\":\"pci.12\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":28,\"chassis\":13,\"id\":\"pci.13\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":29,\"chassis\":14,\"id\":\"pci.14\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":30,\"chassis\":15,\"id\":\"pci.15\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":31,\"chassis\":16,\"id\":\"pci.16\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":32,\"chassis\":17,\"id\":\"pci.17\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":33,\"chassis\":18,\"id\":\"pci.18\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":34,\"chassis\":19,\"id\":\"pci.19\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":35,\"chassis\":20,\"id\":\"pci.20\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x3\"}" -device 
"{\"driver\":\"pcie-root-port\",\"port\":36,\"chassis\":21,\"id\":\"pci.21\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":37,\"chassis\":22,\"id\":\"pci.22\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":38,\"chassis\":23,\"id\":\"pci.23\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":39,\"chassis\":24,\"id\":\"pci.24\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":40,\"chassis\":25,\"id\":\"pci.25\",\"bus\":\"pcie.0\",\"addr\":\"0x5\"}" -device "{\"driver\":\"pcie-pci-bridge\",\"id\":\"pci.26\",\"bus\":\"pci.1\",\"addr\":\"0x0\"}" -device "{\"driver\":\"piix3-usb-uhci\",\"id\":\"usb\",\"bus\":\"pci.26\",\"addr\":\"0x1\"}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/_base/bbb3d8d61ef6b68186f44149e3aba39e4a3bf32e\",\"node-name\":\"libvirt-3-storage\",\"read-only\":true,\"cache\":{\"direct\":true,\"no-flush\":false}}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/69c40217-ae22-4704-ad01-f2ca06c42d58/disk\",\"node-name\":\"libvirt-2-storage\",\"auto-read-only\":true,\"discard\":\"unmap\",\"cache\":{\"direct\":true,\"no-flush\":false}}" -blockdev "{\"node-name\":\"libvirt-2-format\",\"read-only\":false,\"cache\":{\"direct\":true,\"no-flush\":false},\"driver\":\"qcow2\",\"file\":\"libvirt-2-storage\",\"backing\":\"libvirt-3-storage\"}" -device "{\"driver\":\"virtio-blk-pci\",\"bus\":\"pci.3\",\"addr\":\"0x0\",\"drive\":\"libvirt-2-format\",\"id\":\"virtio-disk0\",\"bootindex\":1,\"write-cache\":\"on\"}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/69c40217-ae22-4704-ad01-f2ca06c42d58/disk.config\",\"node-name\":\"libvirt-1-storage\",\"read-only\":true,\"cache\":{\"direct\":true,\"no-flush\":false}}" -device 
"{\"driver\":\"ide-cd\",\"bus\":\"ide.0\",\"drive\":\"libvirt-1-storage\",\"id\":\"sata0-0-0\",\"write-cache\":\"on\"}" -netdev "{\"type\":\"tap\",\"fd\":\"39\",\"vhost\":true,\"vhostfd\":\"41\",\"id\":\"hostnet0\"}" -device "{\"driver\":\"virtio-net-pci\",\"rx_queue_size\":512,\"host_mtu\":1342,\"netdev\":\"hostnet0\",\"id\":\"net0\",\"mac\":\"fa:16:3e:d1:43:c6\",\"bus\":\"pci.2\",\"addr\":\"0x0\"}" -add-fd set=0,fd=37,opaque=serial0-log -chardev pty,id=charserial0,logfile=/dev/fdset/0,logappend=on -device "{\"driver\":\"isa-serial\",\"chardev\":\"charserial0\",\"id\":\"serial0\",\"index\":0}" -device "{\"driver\":\"usb-tablet\",\"id\":\"input0\",\"bus\":\"usb.0\",\"port\":\"1\"}" -audiodev "{\"id\":\"audio1\",\"driver\":\"none\"}" -object "{\"qom-type\":\"tls-creds-x509\",\"id\":\"vnc-tls-creds0\",\"dir\":\"/etc/pki/qemu\",\"endpoint\":\"server\",\"verify-peer\":true}" -vnc "[::0]:2,tls-creds=vnc-tls-creds0,audiodev=audio1" -device "{\"driver\":\"virtio-vga\",\"id\":\"video0\",\"max_outputs\":1,\"bus\":\"pcie.0\",\"addr\":\"0x1\"}" -global ICH9-LPC.noreboot=off -watchdog-action reset -device "{\"driver\":\"virtio-balloon-pci\",\"id\":\"balloon0\",\"bus\":\"pci.4\",\"addr\":\"0x0\"}" -object "{\"qom-type\":\"rng-random\",\"id\":\"objrng0\",\"filename\":\"/dev/urandom\"}" -device "{\"driver\":\"virtio-rng-pci\",\"rng\":\"objrng0\",\"id\":\"rng0\",\"bus\":\"pci.5\",\"addr\":\"0x0\"}" -device "{\"driver\":\"vmcoreinfo\"}" -sandbox on,obsolete=deny,elevateprivileges=deny,spawn=deny,resourcecontrol=deny -msg timestamp=on
           │ └─machine-qemu\x2d9\x2dinstance\x2d0000000d.scope
           │   └─libvirt
           │     └─216871 /usr/libexec/qemu-kvm -name guest=instance-0000000d,debug-threads=on -S -object "{\"qom-type\":\"secret\",\"id\":\"masterKey0\",\"format\":\"raw\",\"file\":\"/var/lib/libvirt/qemu/domain-9-instance-0000000d/master-key.aes\"}" -machine pc-q35-rhel9.8.0,usb=off,dump-guest-core=off,memory-backend=pc.ram,hpet=off,acpi=on -accel kvm -cpu EPYC-Rome,x2apic=on,tsc-deadline=on,hypervisor=on,tsc-adjust=on,spec-ctrl=on,stibp=on,ssbd=on,cmp-legacy=on,overflow-recov=on,succor=on,ibrs=on,amd-ssbd=on,virt-ssbd=on,lbrv=on,tsc-scale=on,vmcb-clean=on,flushbyasid=on,pause-filter=on,pfthreshold=on,svme-addr-chk=on,lfence-always-serializing=on,xsaves=off -m size=1048576k -object "{\"qom-type\":\"memory-backend-ram\",\"id\":\"pc.ram\",\"size\":1073741824}" -overcommit mem-lock=off -smp 1,sockets=1,dies=1,clusters=1,cores=1,threads=1 -uuid 639167ea-9de1-4930-b106-5a6f4a1a260d -smbios "type=1,manufacturer=RDO,product=OpenStack Compute,version=27.5.2-0.20260127144738.eaa65f0.el9,serial=639167ea-9de1-4930-b106-5a6f4a1a260d,uuid=639167ea-9de1-4930-b106-5a6f4a1a260d,family=Virtual Machine" -no-user-config -nodefaults -chardev socket,id=charmonitor,fd=26,server=on,wait=off -mon chardev=charmonitor,id=monitor,mode=control -rtc base=utc,driftfix=slew -global kvm-pit.lost_tick_policy=delay -no-shutdown -boot strict=on -device "{\"driver\":\"pcie-root-port\",\"port\":16,\"chassis\":1,\"id\":\"pci.1\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":17,\"chassis\":2,\"id\":\"pci.2\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":18,\"chassis\":3,\"id\":\"pci.3\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":19,\"chassis\":4,\"id\":\"pci.4\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":20,\"chassis\":5,\"id\":\"pci.5\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x4\"}" -device 
"{\"driver\":\"pcie-root-port\",\"port\":21,\"chassis\":6,\"id\":\"pci.6\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":22,\"chassis\":7,\"id\":\"pci.7\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":23,\"chassis\":8,\"id\":\"pci.8\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":24,\"chassis\":9,\"id\":\"pci.9\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":25,\"chassis\":10,\"id\":\"pci.10\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":26,\"chassis\":11,\"id\":\"pci.11\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":27,\"chassis\":12,\"id\":\"pci.12\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":28,\"chassis\":13,\"id\":\"pci.13\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":29,\"chassis\":14,\"id\":\"pci.14\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":30,\"chassis\":15,\"id\":\"pci.15\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":31,\"chassis\":16,\"id\":\"pci.16\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":32,\"chassis\":17,\"id\":\"pci.17\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":33,\"chassis\":18,\"id\":\"pci.18\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":34,\"chassis\":19,\"id\":\"pci.19\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":35,\"chassis\":20,\"id\":\"pci.20\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x3\"}" -device 
"{\"driver\":\"pcie-root-port\",\"port\":36,\"chassis\":21,\"id\":\"pci.21\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":37,\"chassis\":22,\"id\":\"pci.22\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":38,\"chassis\":23,\"id\":\"pci.23\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":39,\"chassis\":24,\"id\":\"pci.24\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":40,\"chassis\":25,\"id\":\"pci.25\",\"bus\":\"pcie.0\",\"addr\":\"0x5\"}" -device "{\"driver\":\"pcie-pci-bridge\",\"id\":\"pci.26\",\"bus\":\"pci.1\",\"addr\":\"0x0\"}" -device "{\"driver\":\"piix3-usb-uhci\",\"id\":\"usb\",\"bus\":\"pci.26\",\"addr\":\"0x1\"}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/_base/bbb3d8d61ef6b68186f44149e3aba39e4a3bf32e\",\"node-name\":\"libvirt-3-storage\",\"read-only\":true,\"cache\":{\"direct\":true,\"no-flush\":false}}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/639167ea-9de1-4930-b106-5a6f4a1a260d/disk\",\"node-name\":\"libvirt-2-storage\",\"auto-read-only\":true,\"discard\":\"unmap\",\"cache\":{\"direct\":true,\"no-flush\":false}}" -blockdev "{\"node-name\":\"libvirt-2-format\",\"read-only\":false,\"cache\":{\"direct\":true,\"no-flush\":false},\"driver\":\"qcow2\",\"file\":\"libvirt-2-storage\",\"backing\":\"libvirt-3-storage\"}" -device "{\"driver\":\"virtio-blk-pci\",\"bus\":\"pci.3\",\"addr\":\"0x0\",\"drive\":\"libvirt-2-format\",\"id\":\"virtio-disk0\",\"bootindex\":1,\"write-cache\":\"on\"}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/639167ea-9de1-4930-b106-5a6f4a1a260d/disk.config\",\"node-name\":\"libvirt-1-storage\",\"read-only\":true,\"cache\":{\"direct\":true,\"no-flush\":false}}" -device 
"{\"driver\":\"ide-cd\",\"bus\":\"ide.0\",\"drive\":\"libvirt-1-storage\",\"id\":\"sata0-0-0\",\"write-cache\":\"on\"}" -netdev "{\"type\":\"tap\",\"fd\":\"31\",\"vhost\":true,\"vhostfd\":\"36\",\"id\":\"hostnet0\"}" -device "{\"driver\":\"virtio-net-pci\",\"rx_queue_size\":512,\"host_mtu\":1342,\"netdev\":\"hostnet0\",\"id\":\"net0\",\"mac\":\"fa:16:3e:72:3c:9d\",\"bus\":\"pci.2\",\"addr\":\"0x0\"}" -add-fd set=0,fd=29,opaque=serial0-log -chardev pty,id=charserial0,logfile=/dev/fdset/0,logappend=on -device "{\"driver\":\"isa-serial\",\"chardev\":\"charserial0\",\"id\":\"serial0\",\"index\":0}" -device "{\"driver\":\"usb-tablet\",\"id\":\"input0\",\"bus\":\"usb.0\",\"port\":\"1\"}" -audiodev "{\"id\":\"audio1\",\"driver\":\"none\"}" -object "{\"qom-type\":\"tls-creds-x509\",\"id\":\"vnc-tls-creds0\",\"dir\":\"/etc/pki/qemu\",\"endpoint\":\"server\",\"verify-peer\":true}" -vnc "[::0]:1,tls-creds=vnc-tls-creds0,audiodev=audio1" -device "{\"driver\":\"virtio-vga\",\"id\":\"video0\",\"max_outputs\":1,\"bus\":\"pcie.0\",\"addr\":\"0x1\"}" -global ICH9-LPC.noreboot=off -watchdog-action reset -device "{\"driver\":\"virtio-balloon-pci\",\"id\":\"balloon0\",\"bus\":\"pci.4\",\"addr\":\"0x0\"}" -object "{\"qom-type\":\"rng-random\",\"id\":\"objrng0\",\"filename\":\"/dev/urandom\"}" -device "{\"driver\":\"virtio-rng-pci\",\"rng\":\"objrng0\",\"id\":\"rng0\",\"bus\":\"pci.5\",\"addr\":\"0x0\"}" -device "{\"driver\":\"vmcoreinfo\"}" -sandbox on,obsolete=deny,elevateprivileges=deny,spawn=deny,resourcecontrol=deny -msg timestamp=on
           ├─system.slice
           │ ├─NetworkManager.service
           │ │ └─55516 /usr/sbin/NetworkManager --no-daemon
           │ ├─auditd.service
           │ │ ├─703 /sbin/auditd
           │ │ └─705 /usr/sbin/sedispatch
           │ ├─chronyd.service
           │ │ └─64957 /usr/sbin/chronyd -F 2
           │ ├─crond.service
           │ │ └─1008 /usr/sbin/crond -n
           │ ├─dbus-broker.service
           │ │ ├─745 /usr/bin/dbus-broker-launch --scope system --audit
           │ │ └─774 dbus-broker --log 4 --controller 9 --machine-id bf0bc0bb03de29b24cba1cc9599cf5d0 --max-bytes 536870912 --max-fds 4096 --max-matches 131072 --audit
           │ ├─edpm_ceilometer_agent_compute.service
           │ │ └─192799 /usr/bin/conmon --api-version 1 -c 60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5 -u 60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5/userdata -p /run/containers/storage/overlay-containers/60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5/userdata/pidfile -n ceilometer_agent_compute --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5 --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5/userdata/oci-log --conmon-pidfile /run/ceilometer_agent_compute.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup 
--exit-command-arg --stopped-only --exit-command-arg 60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5
           │ ├─edpm_node_exporter.service
           │ │ └─195796 /usr/bin/conmon --api-version 1 -c e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0 -u e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0/userdata -p /run/containers/storage/overlay-containers/e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0/userdata/pidfile -n node_exporter --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0 --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0/userdata/oci-log --conmon-pidfile /run/node_exporter.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 
--stopped-only --exit-command-arg e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0
           │ ├─edpm_nova_compute.service
           │ │ └─183115 /usr/bin/conmon --api-version 1 -c 925ff7e3b4039e4c5d83bde5f3c68c16c1a695e578352baba5cb792427c3386e -u 925ff7e3b4039e4c5d83bde5f3c68c16c1a695e578352baba5cb792427c3386e -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/925ff7e3b4039e4c5d83bde5f3c68c16c1a695e578352baba5cb792427c3386e/userdata -p /run/containers/storage/overlay-containers/925ff7e3b4039e4c5d83bde5f3c68c16c1a695e578352baba5cb792427c3386e/userdata/pidfile -n nova_compute --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/925ff7e3b4039e4c5d83bde5f3c68c16c1a695e578352baba5cb792427c3386e --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/925ff7e3b4039e4c5d83bde5f3c68c16c1a695e578352baba5cb792427c3386e/userdata/oci-log --conmon-pidfile /run/nova_compute.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 
--stopped-only --exit-command-arg 925ff7e3b4039e4c5d83bde5f3c68c16c1a695e578352baba5cb792427c3386e
           │ ├─edpm_openstack_network_exporter.service
           │ │ └─202048 /usr/bin/conmon --api-version 1 -c 8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a -u 8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a/userdata -p /run/containers/storage/overlay-containers/8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a/userdata/pidfile -n openstack_network_exporter --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a/userdata/oci-log --conmon-pidfile /run/openstack_network_exporter.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup 
--exit-command-arg --stopped-only --exit-command-arg 8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a
           │ ├─edpm_ovn_controller.service
           │ │ └─95416 /usr/bin/conmon --api-version 1 -c 60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97 -u 60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97/userdata -p /run/containers/storage/overlay-containers/60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97/userdata/pidfile -n ovn_controller --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97 --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97/userdata/oci-log --conmon-pidfile /run/ovn_controller.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 
--stopped-only --exit-command-arg 60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97
           │ ├─edpm_ovn_metadata_agent.service
           │ │ └─104653 /usr/bin/conmon --api-version 1 -c 573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b -u 573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b/userdata -p /run/containers/storage/overlay-containers/573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b/userdata/pidfile -n ovn_metadata_agent --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b/userdata/oci-log --conmon-pidfile /run/ovn_metadata_agent.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup 
--exit-command-arg --stopped-only --exit-command-arg 573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b
           │ ├─edpm_podman_exporter.service
           │ │ └─198925 /usr/bin/conmon --api-version 1 -c ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735 -u ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735/userdata -p /run/containers/storage/overlay-containers/ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735/userdata/pidfile -n podman_exporter --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735 --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735/userdata/oci-log --conmon-pidfile /run/podman_exporter.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 
--stopped-only --exit-command-arg ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735
           │ ├─gssproxy.service
           │ │ └─873 /usr/sbin/gssproxy -D
           │ ├─irqbalance.service
           │ │ └─781 /usr/sbin/irqbalance
           │ ├─iscsid.service
           │ │ └─168784 /usr/sbin/iscsid -f
           │ ├─multipathd.service
           │ │ └─168942 /sbin/multipathd -d -s
           │ ├─ovs-vswitchd.service
           │ │ └─53815 ovs-vswitchd unix:/var/run/openvswitch/db.sock -vconsole:emer -vsyslog:err -vfile:info --mlockall --user openvswitch:hugetlbfs --no-chdir --log-file=/var/log/openvswitch/ovs-vswitchd.log --pidfile=/var/run/openvswitch/ovs-vswitchd.pid --detach
           │ ├─ovsdb-server.service
           │ │ └─53733 ovsdb-server /etc/openvswitch/conf.db -vconsole:emer -vsyslog:err -vfile:info --remote=punix:/var/run/openvswitch/db.sock --private-key=db:Open_vSwitch,SSL,private_key --certificate=db:Open_vSwitch,SSL,certificate --bootstrap-ca-cert=db:Open_vSwitch,SSL,ca_cert --user openvswitch:hugetlbfs --no-chdir --log-file=/var/log/openvswitch/ovsdb-server.log --pidfile=/var/run/openvswitch/ovsdb-server.pid --detach
           │ ├─podman.service
           │ │ └─198936 /usr/bin/podman --log-level=info system service
           │ ├─polkit.service
           │ │ └─43692 /usr/lib/polkit-1/polkitd --no-debug
           │ ├─rpcbind.service
           │ │ └─701 /usr/bin/rpcbind -w -f
           │ ├─rsyslog.service
           │ │ └─1004 /usr/sbin/rsyslogd -n
           │ ├─sshd.service
           │ │ └─129563 "sshd: /usr/sbin/sshd -D [listener] 0 of 10-100 startups"
           │ ├─system-getty.slice
           │ │ └─getty@tty1.service
           │ │   └─1010 /sbin/agetty -o "-p -- \\u" --noclear - linux
           │ ├─system-serial\x2dgetty.slice
           │ │ └─serial-getty@ttyS0.service
           │ │   └─1011 /sbin/agetty -o "-p -- \\u" --keep-baud 115200,57600,38400,9600 - vt220
           │ ├─systemd-hostnamed.service
           │ │ └─265104 /usr/lib/systemd/systemd-hostnamed
           │ ├─systemd-journald.service
           │ │ └─679 /usr/lib/systemd/systemd-journald
           │ ├─systemd-logind.service
           │ │ └─791 /usr/lib/systemd/systemd-logind
           │ ├─systemd-machined.service
           │ │ └─154436 /usr/lib/systemd/systemd-machined
           │ ├─systemd-udevd.service
           │ │ └─udev
           │ │   └─731 /usr/lib/systemd/systemd-udevd
           │ ├─tuned.service
           │ │ └─43869 /usr/bin/python3 -Es /usr/sbin/tuned -l -P
           │ ├─virtlogd.service
           │ │ └─153807 /usr/sbin/virtlogd
           │ ├─virtnodedevd.service
           │ │ └─183033 /usr/sbin/virtnodedevd --timeout 120
           │ └─virtqemud.service
           │   └─182740 /usr/sbin/virtqemud --timeout 120
           └─user.slice
             └─user-1000.slice
               ├─session-1.scope
               │ └─4517 /usr/bin/python3
               ├─session-100.scope
               │ ├─245026 "sshd-session: zuul [priv]"
               │ └─245029 "sshd-session: zuul@notty"
               ├─session-102.scope
               │ ├─245122 "sshd-session: zuul [priv]"
               │ └─245125 "sshd-session: zuul@notty"
               ├─session-103.scope
               │ ├─245415 "sshd-session: zuul [priv]"
               │ └─245418 "sshd-session: zuul@notty"
               ├─session-105.scope
               │ ├─245517 "sshd-session: zuul [priv]"
               │ └─245520 "sshd-session: zuul@notty"
               ├─session-108.scope
               │ ├─245810 "sshd-session: zuul [priv]"
               │ └─245829 "sshd-session: zuul@notty"
               ├─session-110.scope
               │ ├─245913 "sshd-session: zuul [priv]"
               │ └─245929 "sshd-session: zuul@notty"
               ├─session-111.scope
               │ ├─245989 "sshd-session: zuul [priv]"
               │ └─245992 "sshd-session: zuul@notty"
               ├─session-113.scope
               │ ├─246092 "sshd-session: zuul [priv]"
               │ └─246095 "sshd-session: zuul@notty"
               ├─session-120.scope
               │ ├─246976 "sshd-session: zuul [priv]"
               │ └─246979 "sshd-session: zuul@notty"
               ├─session-122.scope
               │ ├─247166 "sshd-session: zuul [priv]"
               │ └─247169 "sshd-session: zuul@notty"
               ├─session-124.scope
               │ ├─247625 "sshd-session: zuul [priv]"
               │ └─247628 "sshd-session: zuul@notty"
               ├─session-126.scope
               │ ├─247788 "sshd-session: zuul [priv]"
               │ └─247791 "sshd-session: zuul@notty"
               ├─session-128.scope
               │ ├─248316 "sshd-session: zuul [priv]"
               │ └─248319 "sshd-session: zuul@notty"
               ├─session-130.scope
               │ ├─248418 "sshd-session: zuul [priv]"
               │ └─248421 "sshd-session: zuul@notty"
               ├─session-131.scope
               │ ├─248494 "sshd-session: zuul [priv]"
               │ └─248497 "sshd-session: zuul@notty"
               ├─session-133.scope
               │ ├─248553 "sshd-session: zuul [priv]"
               │ └─248568 "sshd-session: zuul@notty"
               ├─session-134.scope
               │ ├─248856 "sshd-session: zuul [priv]"
               │ └─248883 "sshd-session: zuul@notty"
               ├─session-136.scope
               │ ├─249050 "sshd-session: zuul [priv]"
               │ └─249053 "sshd-session: zuul@notty"
               ├─session-158.scope
               │ ├─264277 "sshd-session: zuul [priv]"
               │ ├─264280 "sshd-session: zuul@notty"
               │ ├─264281 sudo bash -c "rm -rf /var/tmp/sos-osp && mkdir /var/tmp/sos-osp && sos report --batch --all-logs --tmp-dir=/var/tmp/sos-osp  -p container,openstack_edpm,system,storage,virt"
               │ ├─264314 /usr/bin/python3 -s /sbin/sos report --batch --all-logs --tmp-dir=/var/tmp/sos-osp -p container,openstack_edpm,system,storage,virt
               │ ├─267249 timeout 15s turbostat --debug sleep 10
               │ ├─267669 timeout 300s systemctl status --all
               │ ├─267670 systemctl status --all
               │ ├─267671 timeout 300s semanage login -l
               │ ├─267672 /usr/bin/python3 -s /sbin/sos report --batch --all-logs --tmp-dir=/var/tmp/sos-osp -p container,openstack_edpm,system,storage,virt
               │ └─267673 /usr/bin/python3 -EsI /usr/sbin/semanage login -l
               ├─session-48.scope
               │ ├─240767 "sshd-session: zuul [priv]"
               │ └─240770 "sshd-session: zuul@notty"
               ├─session-50.scope
               │ ├─240930 "sshd-session: zuul [priv]"
               │ └─240933 "sshd-session: zuul@notty"
               ├─session-51.scope
               │ ├─241003 "sshd-session: zuul [priv]"
               │ └─241006 "sshd-session: zuul@notty"
               ├─session-53.scope
               │ ├─241063 "sshd-session: zuul [priv]"
               │ └─241091 "sshd-session: zuul@notty"
               ├─session-54.scope
               │ ├─241146 "sshd-session: zuul [priv]"
               │ └─241149 "sshd-session: zuul@notty"
               ├─session-56.scope
               │ ├─241295 "sshd-session: zuul [priv]"
               │ └─241298 "sshd-session: zuul@notty"
               ├─session-57.scope
               │ ├─241344 "sshd-session: zuul [priv]"
               │ └─241347 "sshd-session: zuul@notty"
               ├─session-59.scope
               │ ├─241444 "sshd-session: zuul [priv]"
               │ └─241447 "sshd-session: zuul@notty"
               ├─session-60.scope
               │ ├─241519 "sshd-session: zuul [priv]"
               │ └─241522 "sshd-session: zuul@notty"
               ├─session-62.scope
               │ ├─241662 "sshd-session: zuul [priv]"
               │ └─241665 "sshd-session: zuul@notty"
               ├─session-68.scope
               │ ├─242466 "sshd-session: zuul [priv]"
               │ └─242469 "sshd-session: zuul@notty"
               ├─session-70.scope
               │ ├─242564 "sshd-session: zuul [priv]"
               │ └─242586 "sshd-session: zuul@notty"
               ├─session-71.scope
               │ ├─242638 "sshd-session: zuul [priv]"
               │ └─242641 "sshd-session: zuul@notty"
               ├─session-73.scope
               │ ├─242742 "sshd-session: zuul [priv]"
               │ └─242745 "sshd-session: zuul@notty"
               ├─session-74.scope
               │ ├─242772 "sshd-session: zuul [priv]"
               │ └─242775 "sshd-session: zuul@notty"
               ├─session-76.scope
               │ ├─242832 "sshd-session: zuul [priv]"
               │ └─242835 "sshd-session: zuul@notty"
               ├─session-79.scope
               │ ├─243425 "sshd-session: zuul [priv]"
               │ └─243428 "sshd-session: zuul@notty"
               ├─session-81.scope
               │ ├─243524 "sshd-session: zuul [priv]"
Unit boot.automount could not be found.
                │ └─243527 "sshd-session: zuul@notty"
               ├─session-82.scope
               │ ├─243554 "sshd-session: zuul [priv]"
               │ └─243557 "sshd-session: zuul@notty"
               ├─session-84.scope
               │ ├─243613 "sshd-session: zuul [priv]"
               │ └─243616 "sshd-session: zuul@notty"
               ├─session-85.scope
               │ ├─243643 "sshd-session: zuul [priv]"
               │ └─243668 "sshd-session: zuul@notty"
               ├─session-87.scope
               │ ├─243790 "sshd-session: zuul [priv]"
               │ └─243793 "sshd-session: zuul@notty"
               ├─session-94.scope
               │ ├─244430 "sshd-session: zuul [priv]"
               │ └─244433 "sshd-session: zuul@notty"
               ├─session-96.scope
               │ ├─244575 "sshd-session: zuul [priv]"
               │ └─244578 "sshd-session: zuul@notty"
               ├─session-97.scope
               │ ├─244785 "sshd-session: zuul [priv]"
               │ └─244807 "sshd-session: zuul@notty"
               ├─session-99.scope
               │ ├─244996 "sshd-session: zuul [priv]"
               │ └─244999 "sshd-session: zuul@notty"
               └─user@1000.service
                 ├─app.slice
                 │ └─dbus-broker.service
                 │   ├─15953 /usr/bin/dbus-broker-launch --scope user
                 │   └─15968 dbus-broker --log 4 --controller 9 --machine-id bf0bc0bb03de29b24cba1cc9599cf5d0 --max-bytes 100000000000000 --max-fds 25000000000000 --max-matches 5000000000
                 ├─init.scope
                 │ ├─4307 /usr/lib/systemd/systemd --user
                 │ └─4309 "(sd-pam)"
                 └─user.slice
                   └─podman-pause-bff822aa.scope
                     └─15896 catatonit -P

● proc-sys-fs-binfmt_misc.automount - Arbitrary Executable File Formats File System Automount Point
     Loaded: loaded (/usr/lib/systemd/system/proc-sys-fs-binfmt_misc.automount; static)
     Active: active (running) since Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
   Triggers: ● proc-sys-fs-binfmt_misc.mount
      Where: /proc/sys/fs/binfmt_misc
       Docs: https://docs.kernel.org/admin-guide/binfmt-misc.html
             https://www.freedesktop.org/wiki/Software/systemd/APIFileSystems

Jan 30 20:21:01 compute-0 systemd[1]: proc-sys-fs-binfmt_misc.automount: Got automount request for /proc/sys/fs/binfmt_misc, triggered by 264314 (sos)

● dev-cdrom.device - QEMU_DVD-ROM config-2
    Follows: unit currently follows state of sys-devices-pci0000:00-0000:00:01.1-ata1-host0-target0:0:0-0:0:0:0-block-sr0.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:49:48 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:48 UTC; 3h 31min ago
     Device: /sys/devices/pci0000:00/0000:00:01.1/ata1/host0/target0:0:0/0:0:0:0/block/sr0

● dev-disk-by\x2ddiskseq-1.device - /dev/disk/by-diskseq/1
    Follows: unit currently follows state of sys-devices-pci0000:00-0000:00:04.0-virtio2-block-vda.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
     Device: /sys/devices/pci0000:00/0000:00:04.0/virtio2/block/vda

● dev-disk-by\x2ddiskseq-3.device - QEMU_DVD-ROM config-2
    Follows: unit currently follows state of sys-devices-pci0000:00-0000:00:01.1-ata1-host0-target0:0:0-0:0:0:0-block-sr0.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:49:48 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:48 UTC; 3h 31min ago
     Device: /sys/devices/pci0000:00/0000:00:01.1/ata1/host0/target0:0:0/0:0:0:0/block/sr0

● dev-disk-by\x2did-ata\x2dQEMU_DVD\x2dROM_QM00001.device - QEMU_DVD-ROM config-2
    Follows: unit currently follows state of sys-devices-pci0000:00-0000:00:01.1-ata1-host0-target0:0:0-0:0:0:0-block-sr0.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:49:48 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:48 UTC; 3h 31min ago
     Device: /sys/devices/pci0000:00/0000:00:01.1/ata1/host0/target0:0:0/0:0:0:0/block/sr0

● dev-disk-by\x2dlabel-config\x2d2.device - QEMU_DVD-ROM config-2
    Follows: unit currently follows state of sys-devices-pci0000:00-0000:00:01.1-ata1-host0-target0:0:0-0:0:0:0-block-sr0.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:49:48 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:48 UTC; 3h 31min ago
     Device: /sys/devices/pci0000:00/0000:00:01.1/ata1/host0/target0:0:0/0:0:0:0/block/sr0

● dev-disk-by\x2dpartuuid-714daac1\x2d01.device - /dev/disk/by-partuuid/714daac1-01
    Follows: unit currently follows state of sys-devices-pci0000:00-0000:00:04.0-virtio2-block-vda-vda1.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
     Device: /sys/devices/pci0000:00/0000:00:04.0/virtio2/block/vda/vda1

● dev-disk-by\x2dpath-pci\x2d0000:00:01.1\x2data\x2d1.0.device - QEMU_DVD-ROM config-2
    Follows: unit currently follows state of sys-devices-pci0000:00-0000:00:01.1-ata1-host0-target0:0:0-0:0:0:0-block-sr0.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:49:48 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:48 UTC; 3h 31min ago
     Device: /sys/devices/pci0000:00/0000:00:01.1/ata1/host0/target0:0:0/0:0:0:0/block/sr0

● dev-disk-by\x2dpath-pci\x2d0000:00:01.1\x2data\x2d1.device - QEMU_DVD-ROM config-2
    Follows: unit currently follows state of sys-devices-pci0000:00-0000:00:01.1-ata1-host0-target0:0:0-0:0:0:0-block-sr0.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:49:48 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:48 UTC; 3h 31min ago
     Device: /sys/devices/pci0000:00/0000:00:01.1/ata1/host0/target0:0:0/0:0:0:0/block/sr0

● dev-disk-by\x2dpath-pci\x2d0000:00:04.0.device - /dev/disk/by-path/pci-0000:00:04.0
    Follows: unit currently follows state of sys-devices-pci0000:00-0000:00:04.0-virtio2-block-vda.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
     Device: /sys/devices/pci0000:00/0000:00:04.0/virtio2/block/vda

● dev-disk-by\x2dpath-pci\x2d0000:00:04.0\x2dpart1.device - /dev/disk/by-path/pci-0000:00:04.0-part1
    Follows: unit currently follows state of sys-devices-pci0000:00-0000:00:04.0-virtio2-block-vda-vda1.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
     Device: /sys/devices/pci0000:00/0000:00:04.0/virtio2/block/vda/vda1

● dev-disk-by\x2dpath-virtio\x2dpci\x2d0000:00:04.0.device - /dev/disk/by-path/virtio-pci-0000:00:04.0
    Follows: unit currently follows state of sys-devices-pci0000:00-0000:00:04.0-virtio2-block-vda.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
     Device: /sys/devices/pci0000:00/0000:00:04.0/virtio2/block/vda

● dev-disk-by\x2dpath-virtio\x2dpci\x2d0000:00:04.0\x2dpart1.device - /dev/disk/by-path/virtio-pci-0000:00:04.0-part1
    Follows: unit currently follows state of sys-devices-pci0000:00-0000:00:04.0-virtio2-block-vda-vda1.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
     Device: /sys/devices/pci0000:00/0000:00:04.0/virtio2/block/vda/vda1

● dev-disk-by\x2duuid-2026\x2d01\x2d30\x2d16\x2d49\x2d31\x2d00.device - QEMU_DVD-ROM config-2
    Follows: unit currently follows state of sys-devices-pci0000:00-0000:00:01.1-ata1-host0-target0:0:0-0:0:0:0-block-sr0.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:49:48 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:48 UTC; 3h 31min ago
     Device: /sys/devices/pci0000:00/0000:00:01.1/ata1/host0/target0:0:0/0:0:0:0/block/sr0

● dev-disk-by\x2duuid-822f14ea\x2d6e7e\x2d41df\x2db0d8\x2dfbe282d9ded8.device - /dev/disk/by-uuid/822f14ea-6e7e-41df-b0d8-fbe282d9ded8
    Follows: unit currently follows state of sys-devices-pci0000:00-0000:00:04.0-virtio2-block-vda-vda1.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
     Device: /sys/devices/pci0000:00/0000:00:04.0/virtio2/block/vda/vda1

Jan 30 16:49:43 localhost systemd[1]: Found device /dev/disk/by-uuid/822f14ea-6e7e-41df-b0d8-fbe282d9ded8.

● dev-rfkill.device - /dev/rfkill
    Follows: unit currently follows state of sys-devices-virtual-misc-rfkill.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:49:49 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:49 UTC; 3h 31min ago
     Device: /sys/devices/virtual/misc/rfkill

● dev-sr0.device - QEMU_DVD-ROM config-2
    Follows: unit currently follows state of sys-devices-pci0000:00-0000:00:01.1-ata1-host0-target0:0:0-0:0:0:0-block-sr0.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:49:48 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:48 UTC; 3h 31min ago
     Device: /sys/devices/pci0000:00/0000:00:01.1/ata1/host0/target0:0:0/0:0:0:0/block/sr0

● dev-ttyS0.device - /dev/ttyS0
    Follows: unit currently follows state of sys-devices-pnp0-00:00-tty-ttyS0.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
     Device: /sys/devices/pnp0/00:00/tty/ttyS0

Jan 30 16:49:47 localhost systemd[1]: Condition check resulted in /dev/ttyS0 being skipped.

● dev-ttyS1.device - /dev/ttyS1
    Follows: unit currently follows state of sys-devices-platform-serial8250-tty-ttyS1.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
     Device: /sys/devices/platform/serial8250/tty/ttyS1

● dev-ttyS2.device - /dev/ttyS2
    Follows: unit currently follows state of sys-devices-platform-serial8250-tty-ttyS2.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
     Device: /sys/devices/platform/serial8250/tty/ttyS2

● dev-ttyS3.device - /dev/ttyS3
    Follows: unit currently follows state of sys-devices-platform-serial8250-tty-ttyS3.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
     Device: /sys/devices/platform/serial8250/tty/ttyS3

● dev-vda.device - /dev/vda
    Follows: unit currently follows state of sys-devices-pci0000:00-0000:00:04.0-virtio2-block-vda.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
     Device: /sys/devices/pci0000:00/0000:00:04.0/virtio2/block/vda

● dev-vda1.device - /dev/vda1
    Follows: unit currently follows state of sys-devices-pci0000:00-0000:00:04.0-virtio2-block-vda-vda1.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
     Device: /sys/devices/pci0000:00/0000:00:04.0/virtio2/block/vda/vda1

● sys-devices-pci0000:00-0000:00:01.1-ata1-host0-target0:0:0-0:0:0:0-block-sr0.device - QEMU_DVD-ROM config-2
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:49:48 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:48 UTC; 3h 31min ago
     Device: /sys/devices/pci0000:00/0000:00:01.1/ata1/host0/target0:0:0/0:0:0:0/block/sr0

● sys-devices-pci0000:00-0000:00:03.0-virtio1-net-eth0.device - Virtio network device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:49:48 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:48 UTC; 3h 31min ago
     Device: /sys/devices/pci0000:00/0000:00:03.0/virtio1/net/eth0

● sys-devices-pci0000:00-0000:00:04.0-virtio2-block-vda-vda1.device - /sys/devices/pci0000:00/0000:00:04.0/virtio2/block/vda/vda1
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
     Device: /sys/devices/pci0000:00/0000:00:04.0/virtio2/block/vda/vda1

● sys-devices-pci0000:00-0000:00:04.0-virtio2-block-vda.device - /sys/devices/pci0000:00/0000:00:04.0/virtio2/block/vda
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
     Device: /sys/devices/pci0000:00/0000:00:04.0/virtio2/block/vda

● sys-devices-pci0000:00-0000:00:07.0-virtio5-net-eth1.device - Virtio network device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:51:52 UTC; 3h 29min ago
      Until: Fri 2026-01-30 16:51:52 UTC; 3h 29min ago
     Device: /sys/devices/pci0000:00/0000:00:07.0/virtio5/net/eth1

● sys-devices-platform-serial8250-tty-ttyS1.device - /sys/devices/platform/serial8250/tty/ttyS1
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
     Device: /sys/devices/platform/serial8250/tty/ttyS1

● sys-devices-platform-serial8250-tty-ttyS2.device - /sys/devices/platform/serial8250/tty/ttyS2
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
     Device: /sys/devices/platform/serial8250/tty/ttyS2

● sys-devices-platform-serial8250-tty-ttyS3.device - /sys/devices/platform/serial8250/tty/ttyS3
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
     Device: /sys/devices/platform/serial8250/tty/ttyS3

● sys-devices-pnp0-00:00-tty-ttyS0.device - /sys/devices/pnp0/00:00/tty/ttyS0
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
     Device: /sys/devices/pnp0/00:00/tty/ttyS0

● sys-devices-virtual-misc-rfkill.device - /sys/devices/virtual/misc/rfkill
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:49:49 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:49 UTC; 3h 31min ago
     Device: /sys/devices/virtual/misc/rfkill

● sys-devices-virtual-net-br\x2dex.device - /sys/devices/virtual/net/br-ex
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
      Until: Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
     Device: /sys/devices/virtual/net/br-ex

● sys-devices-virtual-net-br\x2dint.device - /sys/devices/virtual/net/br-int
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:34:06 UTC; 2h 47min ago
      Until: Fri 2026-01-30 17:34:06 UTC; 2h 47min ago
     Device: /sys/devices/virtual/net/br-int

● sys-devices-virtual-net-genev_sys_6081.device - /sys/devices/virtual/net/genev_sys_6081
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:34:06 UTC; 2h 47min ago
      Until: Fri 2026-01-30 17:34:06 UTC; 2h 47min ago
     Device: /sys/devices/virtual/net/genev_sys_6081

● sys-devices-virtual-net-ovs\x2dsystem.device - /sys/devices/virtual/net/ovs-system
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
      Until: Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
     Device: /sys/devices/virtual/net/ovs-system

● sys-devices-virtual-net-tap0251ef32\x2dee.device - /sys/devices/virtual/net/tap0251ef32-ee
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:59:38 UTC; 2h 21min ago
      Until: Fri 2026-01-30 17:59:38 UTC; 2h 21min ago
     Device: /sys/devices/virtual/net/tap0251ef32-ee

● sys-devices-virtual-net-tapcc412279\x2d19.device - /sys/devices/virtual/net/tapcc412279-19
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:59:43 UTC; 2h 21min ago
      Until: Fri 2026-01-30 17:59:43 UTC; 2h 21min ago
     Device: /sys/devices/virtual/net/tapcc412279-19

● sys-devices-virtual-net-vlan20.device - /sys/devices/virtual/net/vlan20
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
      Until: Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
     Device: /sys/devices/virtual/net/vlan20

● sys-devices-virtual-net-vlan21.device - /sys/devices/virtual/net/vlan21
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
      Until: Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
     Device: /sys/devices/virtual/net/vlan21

● sys-devices-virtual-net-vlan22.device - /sys/devices/virtual/net/vlan22
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
      Until: Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
     Device: /sys/devices/virtual/net/vlan22

● sys-module-configfs.device - /sys/module/configfs
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
     Device: /sys/module/configfs

● sys-module-fuse.device - /sys/module/fuse
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
     Device: /sys/module/fuse

● sys-subsystem-net-devices-br\x2dex.device - /sys/subsystem/net/devices/br-ex
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
      Until: Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
     Device: /sys/devices/virtual/net/br-ex

● sys-subsystem-net-devices-br\x2dint.device - /sys/subsystem/net/devices/br-int
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:34:06 UTC; 2h 47min ago
      Until: Fri 2026-01-30 17:34:06 UTC; 2h 47min ago
     Device: /sys/devices/virtual/net/br-int

● sys-subsystem-net-devices-eth0.device - Virtio network device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:49:48 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:48 UTC; 3h 31min ago
     Device: /sys/devices/pci0000:00/0000:00:03.0/virtio1/net/eth0

● sys-subsystem-net-devices-eth1.device - Virtio network device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:51:52 UTC; 3h 29min ago
      Until: Fri 2026-01-30 16:51:52 UTC; 3h 29min ago
     Device: /sys/devices/pci0000:00/0000:00:07.0/virtio5/net/eth1

● sys-subsystem-net-devices-genev_sys_6081.device - /sys/subsystem/net/devices/genev_sys_6081
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:34:06 UTC; 2h 47min ago
      Until: Fri 2026-01-30 17:34:06 UTC; 2h 47min ago
     Device: /sys/devices/virtual/net/genev_sys_6081

● sys-subsystem-net-devices-ovs\x2dsystem.device - /sys/subsystem/net/devices/ovs-system
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
      Until: Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
     Device: /sys/devices/virtual/net/ovs-system

● sys-subsystem-net-devices-tap0251ef32\x2dee.device - /sys/subsystem/net/devices/tap0251ef32-ee
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:59:38 UTC; 2h 21min ago
      Until: Fri 2026-01-30 17:59:38 UTC; 2h 21min ago
     Device: /sys/devices/virtual/net/tap0251ef32-ee

● sys-subsystem-net-devices-tapcc412279\x2d19.device - /sys/subsystem/net/devices/tapcc412279-19
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:59:43 UTC; 2h 21min ago
      Until: Fri 2026-01-30 17:59:43 UTC; 2h 21min ago
     Device: /sys/devices/virtual/net/tapcc412279-19

Unit boot.mount could not be found.
Unit home.mount could not be found.
● sys-subsystem-net-devices-vlan20.device - /sys/subsystem/net/devices/vlan20
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
      Until: Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
     Device: /sys/devices/virtual/net/vlan20

● sys-subsystem-net-devices-vlan21.device - /sys/subsystem/net/devices/vlan21
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
      Until: Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
     Device: /sys/devices/virtual/net/vlan21

● sys-subsystem-net-devices-vlan22.device - /sys/subsystem/net/devices/vlan22
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
      Until: Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
     Device: /sys/devices/virtual/net/vlan22

● -.mount - Root Mount
     Loaded: loaded (/etc/fstab; generated)
     Active: active (mounted)
      Where: /
       What: /dev/vda1
       Docs: man:fstab(5)
             man:systemd-fstab-generator(8)

● dev-hugepages.mount - Huge Pages File System
     Loaded: loaded (/usr/lib/systemd/system/dev-hugepages.mount; static)
     Active: active (mounted) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Where: /dev/hugepages
       What: hugetlbfs
       Docs: https://docs.kernel.org/admin-guide/mm/hugetlbpage.html
             https://www.freedesktop.org/wiki/Software/systemd/APIFileSystems
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 52.0K (peak: 556.0K)
        CPU: 4ms
     CGroup: /dev-hugepages.mount

● dev-hugepages1G.mount - /dev/hugepages1G
     Loaded: loaded (/etc/fstab; generated)
     Active: active (mounted) since Fri 2026-01-30 17:31:46 UTC; 2h 49min ago
      Until: Fri 2026-01-30 17:31:46 UTC; 2h 49min ago
      Where: /dev/hugepages1G
       What: none
       Docs: man:fstab(5)
             man:systemd-fstab-generator(8)

● dev-hugepages2M.mount - /dev/hugepages2M
     Loaded: loaded (/etc/fstab; generated)
     Active: active (mounted) since Fri 2026-01-30 17:31:47 UTC; 2h 49min ago
      Until: Fri 2026-01-30 17:31:47 UTC; 2h 49min ago
      Where: /dev/hugepages2M
       What: none
       Docs: man:fstab(5)
             man:systemd-fstab-generator(8)

● dev-mqueue.mount - POSIX Message Queue File System
     Loaded: loaded (/usr/lib/systemd/system/dev-mqueue.mount; static)
     Active: active (mounted) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Where: /dev/mqueue
       What: mqueue
       Docs: man:mq_overview(7)
             https://www.freedesktop.org/wiki/Software/systemd/APIFileSystems
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 4.0K (peak: 540.0K)
        CPU: 2ms
     CGroup: /dev-mqueue.mount

○ proc-fs-nfsd.mount - NFSD configuration filesystem
     Loaded: loaded (/usr/lib/systemd/system/proc-fs-nfsd.mount; static)
     Active: inactive (dead)
      Where: /proc/fs/nfsd
       What: nfsd

● proc-sys-fs-binfmt_misc.mount - Arbitrary Executable File Formats File System
     Loaded: loaded (/usr/lib/systemd/system/proc-sys-fs-binfmt_misc.mount; disabled; preset: disabled)
     Active: active (mounted) since Fri 2026-01-30 20:21:01 UTC; 17s ago
      Until: Fri 2026-01-30 20:21:01 UTC; 17s ago
TriggeredBy: ● proc-sys-fs-binfmt_misc.automount
      Where: /proc/sys/fs/binfmt_misc
       What: binfmt_misc
       Docs: https://docs.kernel.org/admin-guide/binfmt-misc.html
             https://www.freedesktop.org/wiki/Software/systemd/APIFileSystems
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 8.0K (peak: 544.0K)
        CPU: 6ms
     CGroup: /proc-sys-fs-binfmt_misc.mount

Jan 30 20:21:01 compute-0 systemd[1]: Mounting Arbitrary Executable File Formats File System...
Jan 30 20:21:01 compute-0 systemd[1]: Mounted Arbitrary Executable File Formats File System.

● run-credentials-systemd\x2dsysctl.service.mount - /run/credentials/systemd-sysctl.service
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 17:26:33 UTC; 2h 54min ago
      Until: Fri 2026-01-30 17:26:33 UTC; 2h 54min ago
      Where: /run/credentials/systemd-sysctl.service
       What: none

● run-credentials-systemd\x2dsysusers.service.mount - /run/credentials/systemd-sysusers.service
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Where: /run/credentials/systemd-sysusers.service
       What: none

● run-credentials-systemd\x2dtmpfiles\x2dsetup.service.mount - /run/credentials/systemd-tmpfiles-setup.service
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Where: /run/credentials/systemd-tmpfiles-setup.service
       What: none

● run-credentials-systemd\x2dtmpfiles\x2dsetup\x2ddev.service.mount - /run/credentials/systemd-tmpfiles-setup-dev.service
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Where: /run/credentials/systemd-tmpfiles-setup-dev.service
       What: none

● run-netns.mount - /run/netns
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 17:31:16 UTC; 2h 50min ago
      Until: Fri 2026-01-30 17:31:16 UTC; 2h 50min ago
      Where: /run/netns
       What: tmpfs

● run-user-1000.mount - /run/user/1000
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 16:50:03 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:50:03 UTC; 3h 31min ago
      Where: /run/user/1000
       What: tmpfs

● sys-fs-fuse-connections.mount - FUSE Control File System
     Loaded: loaded (/usr/lib/systemd/system/sys-fs-fuse-connections.mount; static)
     Active: active (mounted) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Where: /sys/fs/fuse/connections
       What: fusectl
       Docs: https://docs.kernel.org/filesystems/fuse.html
             https://www.freedesktop.org/wiki/Software/systemd/APIFileSystems
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 4.0K (peak: 540.0K)
        CPU: 3ms
     CGroup: /sys-fs-fuse-connections.mount

Jan 30 16:49:47 localhost systemd[1]: Mounting FUSE Control File System...
Jan 30 16:49:47 localhost systemd[1]: Mounted FUSE Control File System.

● sys-kernel-config.mount - Kernel Configuration File System
     Loaded: loaded (/proc/self/mountinfo; static)
     Active: active (mounted) since Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
      Where: /sys/kernel/config
       What: configfs
       Docs: https://docs.kernel.org/filesystems/configfs.html
             https://www.freedesktop.org/wiki/Software/systemd/APIFileSystems

● sys-kernel-debug-tracing.mount - /sys/kernel/debug/tracing
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 20:21:00 UTC; 18s ago
      Until: Fri 2026-01-30 20:21:00 UTC; 18s ago
      Where: /sys/kernel/debug/tracing
       What: tracefs

● sys-kernel-debug.mount - Kernel Debug File System
     Loaded: loaded (/usr/lib/systemd/system/sys-kernel-debug.mount; static)
     Active: active (mounted) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Where: /sys/kernel/debug
       What: debugfs
       Docs: https://docs.kernel.org/filesystems/debugfs.html
             https://www.freedesktop.org/wiki/Software/systemd/APIFileSystems
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 4.0K (peak: 540.0K)
        CPU: 2ms
     CGroup: /sys-kernel-debug.mount

● sys-kernel-tracing.mount - Kernel Trace File System
     Loaded: loaded (/usr/lib/systemd/system/sys-kernel-tracing.mount; static)
Unit sysroot.mount could not be found.
     Active: active (mounted) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Where: /sys/kernel/tracing
       What: tracefs
       Docs: https://docs.kernel.org/trace/ftrace.html
             https://www.freedesktop.org/wiki/Software/systemd/APIFileSystems
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 4.0K (peak: 540.0K)
        CPU: 3ms
     CGroup: /sys-kernel-tracing.mount

○ tmp.mount - Temporary Directory /tmp
     Loaded: loaded (/usr/lib/systemd/system/tmp.mount; disabled; preset: disabled)
     Active: inactive (dead)
      Where: /tmp
       What: tmpfs
       Docs: https://systemd.io/TEMPORARY_DIRECTORIES
             man:file-hierarchy(7)
             https://www.freedesktop.org/wiki/Software/systemd/APIFileSystems

● var-lib-containers-storage-overlay-5fdc93ef6c5e9d3749bf00f2324e11977e1968e4c60088c65a21ba88ea18b91b-merged.mount - /var/lib/containers/storage/overlay/5fdc93ef6c5e9d3749bf00f2324e11977e1968e4c60088c65a21ba88ea18b91b/merged
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 17:35:02 UTC; 2h 46min ago
      Until: Fri 2026-01-30 17:35:02 UTC; 2h 46min ago
      Where: /var/lib/containers/storage/overlay/5fdc93ef6c5e9d3749bf00f2324e11977e1968e4c60088c65a21ba88ea18b91b/merged
       What: overlay

● var-lib-containers-storage-overlay-7a25b82810da06ed86974bd390744b02913a8b8654a2fab46ab9825a95d4b70e-merged.mount - /var/lib/containers/storage/overlay/7a25b82810da06ed86974bd390744b02913a8b8654a2fab46ab9825a95d4b70e/merged
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 17:44:14 UTC; 2h 37min ago
      Until: Fri 2026-01-30 17:44:14 UTC; 2h 37min ago
      Where: /var/lib/containers/storage/overlay/7a25b82810da06ed86974bd390744b02913a8b8654a2fab46ab9825a95d4b70e/merged
       What: overlay

● var-lib-containers-storage-overlay-7ad7dfd74c4aa71483ae9a8289e985b4a81e37c6bd84efe0ed586d2ae3181d3f-merged.mount - /var/lib/containers/storage/overlay/7ad7dfd74c4aa71483ae9a8289e985b4a81e37c6bd84efe0ed586d2ae3181d3f/merged
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 17:43:22 UTC; 2h 37min ago
      Until: Fri 2026-01-30 17:43:22 UTC; 2h 37min ago
      Where: /var/lib/containers/storage/overlay/7ad7dfd74c4aa71483ae9a8289e985b4a81e37c6bd84efe0ed586d2ae3181d3f/merged
       What: overlay

● var-lib-containers-storage-overlay-975e6501935eccd6be01da9dbae91e373703b4c4c1c3e05b2d02e2cc790e784e-merged.mount - /var/lib/containers/storage/overlay/975e6501935eccd6be01da9dbae91e373703b4c4c1c3e05b2d02e2cc790e784e/merged
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 17:34:05 UTC; 2h 47min ago
      Until: Fri 2026-01-30 17:34:05 UTC; 2h 47min ago
      Where: /var/lib/containers/storage/overlay/975e6501935eccd6be01da9dbae91e373703b4c4c1c3e05b2d02e2cc790e784e/merged
       What: overlay

● var-lib-containers-storage-overlay-9dec011751432db70426d9440e77026880f511ffddc6b694c03e0c033ef2dc7e-merged.mount - /var/lib/containers/storage/overlay/9dec011751432db70426d9440e77026880f511ffddc6b694c03e0c033ef2dc7e/merged
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 17:42:22 UTC; 2h 38min ago
      Until: Fri 2026-01-30 17:42:22 UTC; 2h 38min ago
      Where: /var/lib/containers/storage/overlay/9dec011751432db70426d9440e77026880f511ffddc6b694c03e0c033ef2dc7e/merged
       What: overlay

● var-lib-containers-storage-overlay-a8fd9a9865b1445d3f4cfd349965a94da2b2061a649348853ea33302d20fcc06-merged.mount - /var/lib/containers/storage/overlay/a8fd9a9865b1445d3f4cfd349965a94da2b2061a649348853ea33302d20fcc06/merged
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 17:43:36 UTC; 2h 37min ago
      Until: Fri 2026-01-30 17:43:36 UTC; 2h 37min ago
      Where: /var/lib/containers/storage/overlay/a8fd9a9865b1445d3f4cfd349965a94da2b2061a649348853ea33302d20fcc06/merged
       What: overlay

● var-lib-containers-storage-overlay-a9798dbcb4c7db3588ac0972a724ab501de0cff6b129d1565f7e5b601a991067-merged.mount - /var/lib/containers/storage/overlay/a9798dbcb4c7db3588ac0972a724ab501de0cff6b129d1565f7e5b601a991067/merged
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 17:43:54 UTC; 2h 37min ago
      Until: Fri 2026-01-30 17:43:54 UTC; 2h 37min ago
      Where: /var/lib/containers/storage/overlay/a9798dbcb4c7db3588ac0972a724ab501de0cff6b129d1565f7e5b601a991067/merged
       What: overlay

● var-lib-containers-storage-overlay.mount - /var/lib/containers/storage/overlay
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 17:34:05 UTC; 2h 47min ago
      Until: Fri 2026-01-30 17:34:05 UTC; 2h 47min ago
      Where: /var/lib/containers/storage/overlay
       What: /dev/vda1

● var-lib-containers-storage-overlay\x2dcontainers-573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b-userdata-shm.mount - /var/lib/containers/storage/overlay-containers/573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b/userdata/shm
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 17:35:02 UTC; 2h 46min ago
      Until: Fri 2026-01-30 17:35:02 UTC; 2h 46min ago
      Where: /var/lib/containers/storage/overlay-containers/573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b/userdata/shm
       What: shm

● var-lib-containers-storage-overlay\x2dcontainers-60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5-userdata-shm.mount - /var/lib/containers/storage/overlay-containers/60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5/userdata/shm
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 17:43:22 UTC; 2h 37min ago
      Until: Fri 2026-01-30 17:43:22 UTC; 2h 37min ago
      Where: /var/lib/containers/storage/overlay-containers/60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5/userdata/shm
       What: shm

● var-lib-containers-storage-overlay\x2dcontainers-60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97-userdata-shm.mount - /var/lib/containers/storage/overlay-containers/60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97/userdata/shm
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 17:34:05 UTC; 2h 47min ago
      Until: Fri 2026-01-30 17:34:05 UTC; 2h 47min ago
      Where: /var/lib/containers/storage/overlay-containers/60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97/userdata/shm
       What: shm

● var-lib-containers-storage-overlay\x2dcontainers-8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a-userdata-shm.mount - /var/lib/containers/storage/overlay-containers/8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a/userdata/shm
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 17:44:14 UTC; 2h 37min ago
      Until: Fri 2026-01-30 17:44:14 UTC; 2h 37min ago
      Where: /var/lib/containers/storage/overlay-containers/8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a/userdata/shm
       What: shm

● var-lib-containers-storage-overlay\x2dcontainers-925ff7e3b4039e4c5d83bde5f3c68c16c1a695e578352baba5cb792427c3386e-userdata-shm.mount - /var/lib/containers/storage/overlay-containers/925ff7e3b4039e4c5d83bde5f3c68c16c1a695e578352baba5cb792427c3386e/userdata/shm
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 17:42:22 UTC; 2h 38min ago
      Until: Fri 2026-01-30 17:42:22 UTC; 2h 38min ago
      Where: /var/lib/containers/storage/overlay-containers/925ff7e3b4039e4c5d83bde5f3c68c16c1a695e578352baba5cb792427c3386e/userdata/shm
       What: shm

● var-lib-containers-storage-overlay\x2dcontainers-ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735-userdata-shm.mount - /var/lib/containers/storage/overlay-containers/ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735/userdata/shm
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 17:43:54 UTC; 2h 37min ago
      Until: Fri 2026-01-30 17:43:54 UTC; 2h 37min ago
      Where: /var/lib/containers/storage/overlay-containers/ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735/userdata/shm
       What: shm

● var-lib-containers-storage-overlay\x2dcontainers-e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0-userdata-shm.mount - /var/lib/containers/storage/overlay-containers/e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0/userdata/shm
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 17:43:36 UTC; 2h 37min ago
      Until: Fri 2026-01-30 17:43:36 UTC; 2h 37min ago
      Where: /var/lib/containers/storage/overlay-containers/e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0/userdata/shm
       What: shm

○ var-lib-machines.mount - Virtual Machine and Container Storage (Compatibility)
     Loaded: loaded (/usr/lib/systemd/system/var-lib-machines.mount; static)
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 17:39:33 UTC; 2h 41min ago
      Where: /var/lib/machines
       What: /var/lib/machines.raw

Jan 30 17:39:33 compute-0 systemd[1]: Virtual Machine and Container Storage (Compatibility) was skipped because of an unmet condition check (ConditionPathExists=/var/lib/machines.raw).

● var-lib-nfs-rpc_pipefs.mount - RPC Pipe File System
     Loaded: loaded (/proc/self/mountinfo; static)
     Active: active (mounted) since Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
      Where: /var/lib/nfs/rpc_pipefs
       What: rpc_pipefs

● systemd-ask-password-console.path - Dispatch Password Requests to Console Directory Watch
     Loaded: loaded (/usr/lib/systemd/system/systemd-ask-password-console.path; static)
     Active: active (waiting) since Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
   Triggers: ● systemd-ask-password-console.service
       Docs: man:systemd-ask-password-console.path(8)

● systemd-ask-password-wall.path - Forward Password Requests to Wall Directory Watch
     Loaded: loaded (/usr/lib/systemd/system/systemd-ask-password-wall.path; static)
     Active: active (waiting) since Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
   Triggers: ● systemd-ask-password-wall.service
       Docs: man:systemd-ask-password-wall.path(8)

● init.scope - System and Service Manager
     Loaded: loaded
  Transient: yes
     Active: active (running) since Fri 2026-01-30 16:49:42 UTC; 3h 31min ago
       Docs: man:systemd(1)
         IO: 476.0K read, 0B written
      Tasks: 1 (limit: 100092)
     Memory: 42.7M (peak: 59.9M)
        CPU: 47.539s
     CGroup: /init.scope
             └─1 /usr/lib/systemd/systemd --switched-root --system --deserialize 31

Jan 30 20:20:56 compute-0 systemd[1]: Started Session 158 of User zuul.
Jan 30 20:21:01 compute-0 systemd[1]: proc-sys-fs-binfmt_misc.automount: Got automount request for /proc/sys/fs/binfmt_misc, triggered by 264314 (sos)
Jan 30 20:21:01 compute-0 systemd[1]: Mounting Arbitrary Executable File Formats File System...
Jan 30 20:21:01 compute-0 systemd[1]: Mounted Arbitrary Executable File Formats File System.
Jan 30 20:21:03 compute-0 systemd[1]: Started Session 159 of User zuul.
Jan 30 20:21:04 compute-0 systemd[1]: Started Session 160 of User zuul.
Jan 30 20:21:04 compute-0 systemd[1]: session-159.scope: Deactivated successfully.
Jan 30 20:21:04 compute-0 systemd[1]: session-160.scope: Deactivated successfully.
Jan 30 20:21:04 compute-0 systemd[1]: Starting Hostname Service...
Jan 30 20:21:04 compute-0 systemd[1]: Started Hostname Service.

● libpod-573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b.scope - libcrun container
     Loaded: loaded (/run/systemd/transient/libpod-573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b.scope; transient)
  Transient: yes
    Drop-In: /run/systemd/transient/libpod-573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b.scope.d
             └─dep.conf
     Active: active (running) since Fri 2026-01-30 17:35:02 UTC; 2h 46min ago
         IO: 0B read, 25.2M written
      Tasks: 11 (limit: 4096)
     Memory: 422.2M (peak: 465.3M)
        CPU: 50.492s
     CGroup: /machine.slice/libpod-573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b.scope
             └─container
               ├─104655 dumb-init --single-child -- kolla_start
               ├─104658 "neutron-ovn-metadata-agent (/usr/bin/python3 /usr/bin/neutron-ovn-metadata-agent)"
               ├─104880 "neutron-ovn-metadata-agent (/usr/bin/python3 /usr/bin/neutron-ovn-metadata-agent)"
               ├─104920 /usr/bin/python3 /bin/privsep-helper --config-file /etc/neutron/neutron.conf --config-dir /etc/neutron.conf.d --privsep_context neutron.privileged.namespace_cmd --privsep_sock_path /tmp/tmpp73u3fin/privsep.sock
               ├─212547 /usr/bin/python3 /bin/privsep-helper --config-file /etc/neutron/neutron.conf --config-dir /etc/neutron.conf.d --privsep_context neutron.privileged.default --privsep_sock_path /tmp/tmpr5q8om25/privsep.sock
               └─212743 /usr/bin/python3 /bin/privsep-helper --config-file /etc/neutron/neutron.conf --config-dir /etc/neutron.conf.d --privsep_context neutron.privileged.link_cmd --privsep_sock_path /tmp/tmp3la6aon7/privsep.sock

Jan 30 20:05:06 compute-0 podman[259674]: 2026-01-30 20:05:06.271646824 +0000 UTC m=+0.045759610 container died 52d5d6e22205021091572630c3b30bb1aa090721bbc2c9f277d3487451d2ecf1 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-8707e31d-f583-46c9-accb-4a15c80f0a90, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true, org.label-schema.build-date=20260127, org.label-schema.license=GPLv2, tcib_build_tag=b85d0548925081ae8c6bdd697658cec4, maintainer=OpenStack Kubernetes Operator team)
Jan 30 20:05:06 compute-0 podman[259674]: 2026-01-30 20:05:06.368677994 +0000 UTC m=+0.142790780 container cleanup 52d5d6e22205021091572630c3b30bb1aa090721bbc2c9f277d3487451d2ecf1 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-8707e31d-f583-46c9-accb-4a15c80f0a90, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=b85d0548925081ae8c6bdd697658cec4, tcib_managed=true, org.label-schema.build-date=20260127, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0)
Jan 30 20:05:06 compute-0 podman[259717]: 2026-01-30 20:05:06.441993466 +0000 UTC m=+0.052345264 container remove 52d5d6e22205021091572630c3b30bb1aa090721bbc2c9f277d3487451d2ecf1 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-8707e31d-f583-46c9-accb-4a15c80f0a90, tcib_build_tag=b85d0548925081ae8c6bdd697658cec4, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, org.label-schema.build-date=20260127)
Jan 30 20:07:07 compute-0 podman[260332]: 2026-01-30 20:07:07.159874324 +0000 UTC m=+0.037275920 container create 36586e9dffff858f22e42e79db799608fac647ab6e0fc6c469411c00674cfc27 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-68e01440-667d-4947-88bc-8bc355005035, org.label-schema.license=GPLv2, org.label-schema.build-date=20260127, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=b85d0548925081ae8c6bdd697658cec4, tcib_managed=true)
Jan 30 20:07:07 compute-0 podman[260332]: 2026-01-30 20:07:07.139243368 +0000 UTC m=+0.016644914 image pull 19964fda6b912d3d57e21b0bcc221725d936e513025030cb508474fe04b06af8 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Jan 30 20:07:07 compute-0 podman[260332]: 2026-01-30 20:07:07.247252492 +0000 UTC m=+0.124654038 container init 36586e9dffff858f22e42e79db799608fac647ab6e0fc6c469411c00674cfc27 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-68e01440-667d-4947-88bc-8bc355005035, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, tcib_build_tag=b85d0548925081ae8c6bdd697658cec4, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20260127, org.label-schema.vendor=CentOS)
Jan 30 20:07:07 compute-0 podman[260332]: 2026-01-30 20:07:07.2564442 +0000 UTC m=+0.133845736 container start 36586e9dffff858f22e42e79db799608fac647ab6e0fc6c469411c00674cfc27 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-68e01440-667d-4947-88bc-8bc355005035, tcib_build_tag=b85d0548925081ae8c6bdd697658cec4, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.build-date=20260127, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS)
Jan 30 20:15:29 compute-0 podman[262634]: 2026-01-30 20:15:29.957885888 +0000 UTC m=+0.058231360 container died 36586e9dffff858f22e42e79db799608fac647ab6e0fc6c469411c00674cfc27 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-68e01440-667d-4947-88bc-8bc355005035, tcib_managed=true, org.label-schema.build-date=20260127, tcib_build_tag=b85d0548925081ae8c6bdd697658cec4, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS)
Jan 30 20:15:30 compute-0 podman[262634]: 2026-01-30 20:15:30.015390094 +0000 UTC m=+0.115735566 container cleanup 36586e9dffff858f22e42e79db799608fac647ab6e0fc6c469411c00674cfc27 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-68e01440-667d-4947-88bc-8bc355005035, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, tcib_build_tag=b85d0548925081ae8c6bdd697658cec4, org.label-schema.vendor=CentOS, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20260127, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0)
Jan 30 20:15:30 compute-0 podman[262678]: 2026-01-30 20:15:30.077506034 +0000 UTC m=+0.046524504 container remove 36586e9dffff858f22e42e79db799608fac647ab6e0fc6c469411c00674cfc27 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-68e01440-667d-4947-88bc-8bc355005035, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20260127, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=b85d0548925081ae8c6bdd697658cec4, org.label-schema.license=GPLv2)

● libpod-60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5.scope - libcrun container
     Loaded: loaded (/run/systemd/transient/libpod-60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 17:43:22 UTC; 2h 37min ago
         IO: 0B read, 232.0K written
      Tasks: 7 (limit: 4096)
     Memory: 105.2M (peak: 107.1M)
        CPU: 9.425s
     CGroup: /machine.slice/libpod-60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5.scope
             └─container
               ├─192801 dumb-init --single-child -- kolla_start
               ├─192804 "ceilometer-polling: master process [/usr/bin/ceilometer-polling --polling-namespaces compute --logfile /dev/stdout]"
               └─192931 "ceilometer-polling: AgentManager worker(0)"

Jan 30 17:43:22 compute-0 systemd[1]: Started libcrun container.
Jan 30 17:43:22 compute-0 sudo[192805]: ceilometer : PWD=/ ; USER=root ; COMMAND=/usr/local/bin/kolla_set_configs
Jan 30 17:43:22 compute-0 sudo[192805]: pam_systemd(sudo:session): Failed to connect to system bus: No such file or directory
Jan 30 17:43:22 compute-0 sudo[192805]: pam_unix(sudo:session): session opened for user root(uid=0) by (uid=42405)
Jan 30 17:43:22 compute-0 sudo[192805]: pam_unix(sudo:session): session closed for user root
Jan 30 17:43:22 compute-0 sudo[192827]: ceilometer : PWD=/ ; USER=root ; COMMAND=/usr/local/bin/kolla_copy_cacerts
Jan 30 17:43:22 compute-0 sudo[192827]: pam_systemd(sudo:session): Failed to connect to system bus: No such file or directory
Jan 30 17:43:22 compute-0 sudo[192827]: pam_unix(sudo:session): session opened for user root(uid=0) by (uid=42405)
Jan 30 17:43:22 compute-0 sudo[192827]: pam_unix(sudo:session): session closed for user root

● libpod-60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97.scope - libcrun container
     Loaded: loaded (/run/systemd/transient/libpod-60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97.scope; transient)
  Transient: yes
    Drop-In: /run/systemd/transient/libpod-60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97.scope.d
             └─dep.conf
     Active: active (running) since Fri 2026-01-30 17:34:05 UTC; 2h 47min ago
         IO: 0B read, 4.0K written
      Tasks: 6 (limit: 4096)
     Memory: 19.8M (peak: 24.5M)
        CPU: 20.121s
     CGroup: /machine.slice/libpod-60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97.scope
             └─container
               ├─95418 dumb-init --single-child -- kolla_start
               └─95421 /usr/bin/ovn-controller --pidfile unix:/run/openvswitch/db.sock -p /etc/pki/tls/private/ovndb.key -c /etc/pki/tls/certs/ovndb.crt -C /etc/pki/tls/certs/ovndbca.crt

Jan 30 17:34:05 compute-0 systemd[1]: Started libcrun container.

● libpod-8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a.scope - libcrun container
     Loaded: loaded (/run/systemd/transient/libpod-8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 17:44:14 UTC; 2h 37min ago
         IO: 0B read, 0B written
      Tasks: 6 (limit: 4096)
     Memory: 5.2M (peak: 7.3M)
        CPU: 3.198s
     CGroup: /machine.slice/libpod-8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a.scope
             └─container
               └─202050 /app/openstack-network-exporter

Jan 30 17:44:14 compute-0 systemd[1]: Started libcrun container.

● libpod-925ff7e3b4039e4c5d83bde5f3c68c16c1a695e578352baba5cb792427c3386e.scope - libcrun container
     Loaded: loaded (/run/systemd/transient/libpod-925ff7e3b4039e4c5d83bde5f3c68c16c1a695e578352baba5cb792427c3386e.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 17:42:22 UTC; 2h 38min ago
         IO: 168.0K read, 2.2G written
      Tasks: 27 (limit: 4096)
     Memory: 244.3M (peak: 1.0G)
        CPU: 2min 32.113s
     CGroup: /machine.slice/libpod-925ff7e3b4039e4c5d83bde5f3c68c16c1a695e578352baba5cb792427c3386e.scope
             └─container
               ├─183117 dumb-init --single-child -- kolla_start
               ├─183119 /usr/bin/python3 /usr/bin/nova-compute
               ├─212621 /usr/bin/python3 /bin/privsep-helper --config-file /etc/nova/nova.conf --config-file /etc/nova/nova-compute.conf --config-dir /etc/nova/nova.conf.d --privsep_context nova.privsep.sys_admin_pctxt --privsep_sock_path /tmp/tmp5s1ioym_/privsep.sock
               └─212642 /usr/bin/python3 /bin/privsep-helper --config-file /etc/nova/nova.conf --config-file /etc/nova/nova-compute.conf --config-dir /etc/nova/nova.conf.d --privsep_context vif_plug_ovs.privsep.vif_plug --privsep_sock_path /tmp/tmpmo51pq8n/privsep.sock

Jan 30 17:42:22 compute-0 systemd[1]: Started libcrun container.

● libpod-ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735.scope - libcrun container
     Loaded: loaded (/run/systemd/transient/libpod-ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 17:43:54 UTC; 2h 37min ago
         IO: 0B read, 0B written
      Tasks: 8 (limit: 4096)
     Memory: 10.4M (peak: 12.5M)
        CPU: 2.482s
     CGroup: /machine.slice/libpod-ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735.scope
             └─container
               └─198927 /bin/podman_exporter --web.config.file=/etc/podman_exporter/podman_exporter.yaml

Jan 30 17:43:54 compute-0 systemd[1]: Started libcrun container.

● libpod-e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0.scope - libcrun container
     Loaded: loaded (/run/systemd/transient/libpod-e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 17:43:36 UTC; 2h 37min ago
         IO: 0B read, 0B written
      Tasks: 5 (limit: 4096)
     Memory: 11.0M (peak: 12.8M)
        CPU: 1.954s
     CGroup: /machine.slice/libpod-e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0.scope
             └─container
               └─195798 /bin/node_exporter --web.config.file=/etc/node_exporter/node_exporter.yaml --web.disable-exporter-metrics --collector.systemd "--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service" --no-collector.dmi --no-collector.entropy --no-collector.thermal_zone --no-collector.time --no-collector.timex --no-collector.uname --no-collector.stat --no-collector.hwmon --no-collector.os --no-collector.selinux --no-collector.textfile --no-collector.powersupplyclass --no-collector.pressure --no-collector.rapl

Jan 30 17:43:36 compute-0 systemd[1]: Started libcrun container.

● machine-qemu\x2d10\x2dinstance\x2d0000000f.scope - Virtual Machine qemu-10-instance-0000000f
     Loaded: loaded (/run/systemd/transient/machine-qemu\x2d10\x2dinstance\x2d0000000f.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 17:59:43 UTC; 2h 21min ago
         IO: 48.6G read, 714.2M written
      Tasks: 21 (limit: 16384)
     Memory: 1.1G (peak: 1.2G)
        CPU: 2h 22min 34.255s
     CGroup: /machine.slice/machine-qemu\x2d10\x2dinstance\x2d0000000f.scope
             └─libvirt
               └─216930 /usr/libexec/qemu-kvm -name guest=instance-0000000f,debug-threads=on -S -object "{\"qom-type\":\"secret\",\"id\":\"masterKey0\",\"format\":\"raw\",\"file\":\"/var/lib/libvirt/qemu/domain-10-instance-0000000f/master-key.aes\"}" -machine pc-q35-rhel9.8.0,usb=off,dump-guest-core=off,memory-backend=pc.ram,hpet=off,acpi=on -accel kvm -cpu EPYC-Rome,x2apic=on,tsc-deadline=on,hypervisor=on,tsc-adjust=on,spec-ctrl=on,stibp=on,ssbd=on,cmp-legacy=on,overflow-recov=on,succor=on,ibrs=on,amd-ssbd=on,virt-ssbd=on,lbrv=on,tsc-scale=on,vmcb-clean=on,flushbyasid=on,pause-filter=on,pfthreshold=on,svme-addr-chk=on,lfence-always-serializing=on,xsaves=off -m size=1048576k -object "{\"qom-type\":\"memory-backend-ram\",\"id\":\"pc.ram\",\"size\":1073741824}" -overcommit mem-lock=off -smp 1,sockets=1,dies=1,clusters=1,cores=1,threads=1 -uuid 69c40217-ae22-4704-ad01-f2ca06c42d58 -smbios "type=1,manufacturer=RDO,product=OpenStack Compute,version=27.5.2-0.20260127144738.eaa65f0.el9,serial=69c40217-ae22-4704-ad01-f2ca06c42d58,uuid=69c40217-ae22-4704-ad01-f2ca06c42d58,family=Virtual Machine" -no-user-config -nodefaults -chardev socket,id=charmonitor,fd=33,server=on,wait=off -mon chardev=charmonitor,id=monitor,mode=control -rtc base=utc,driftfix=slew -global kvm-pit.lost_tick_policy=delay -no-shutdown -boot strict=on -device "{\"driver\":\"pcie-root-port\",\"port\":16,\"chassis\":1,\"id\":\"pci.1\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":17,\"chassis\":2,\"id\":\"pci.2\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":18,\"chassis\":3,\"id\":\"pci.3\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":19,\"chassis\":4,\"id\":\"pci.4\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":20,\"chassis\":5,\"id\":\"pci.5\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x4\"}" -device 
"{\"driver\":\"pcie-root-port\",\"port\":21,\"chassis\":6,\"id\":\"pci.6\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":22,\"chassis\":7,\"id\":\"pci.7\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":23,\"chassis\":8,\"id\":\"pci.8\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":24,\"chassis\":9,\"id\":\"pci.9\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":25,\"chassis\":10,\"id\":\"pci.10\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":26,\"chassis\":11,\"id\":\"pci.11\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":27,\"chassis\":12,\"id\":\"pci.12\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":28,\"chassis\":13,\"id\":\"pci.13\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":29,\"chassis\":14,\"id\":\"pci.14\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":30,\"chassis\":15,\"id\":\"pci.15\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":31,\"chassis\":16,\"id\":\"pci.16\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":32,\"chassis\":17,\"id\":\"pci.17\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":33,\"chassis\":18,\"id\":\"pci.18\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":34,\"chassis\":19,\"id\":\"pci.19\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":35,\"chassis\":20,\"id\":\"pci.20\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x3\"}" -device 
"{\"driver\":\"pcie-root-port\",\"port\":36,\"chassis\":21,\"id\":\"pci.21\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":37,\"chassis\":22,\"id\":\"pci.22\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":38,\"chassis\":23,\"id\":\"pci.23\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":39,\"chassis\":24,\"id\":\"pci.24\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":40,\"chassis\":25,\"id\":\"pci.25\",\"bus\":\"pcie.0\",\"addr\":\"0x5\"}" -device "{\"driver\":\"pcie-pci-bridge\",\"id\":\"pci.26\",\"bus\":\"pci.1\",\"addr\":\"0x0\"}" -device "{\"driver\":\"piix3-usb-uhci\",\"id\":\"usb\",\"bus\":\"pci.26\",\"addr\":\"0x1\"}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/_base/bbb3d8d61ef6b68186f44149e3aba39e4a3bf32e\",\"node-name\":\"libvirt-3-storage\",\"read-only\":true,\"cache\":{\"direct\":true,\"no-flush\":false}}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/69c40217-ae22-4704-ad01-f2ca06c42d58/disk\",\"node-name\":\"libvirt-2-storage\",\"auto-read-only\":true,\"discard\":\"unmap\",\"cache\":{\"direct\":true,\"no-flush\":false}}" -blockdev "{\"node-name\":\"libvirt-2-format\",\"read-only\":false,\"cache\":{\"direct\":true,\"no-flush\":false},\"driver\":\"qcow2\",\"file\":\"libvirt-2-storage\",\"backing\":\"libvirt-3-storage\"}" -device "{\"driver\":\"virtio-blk-pci\",\"bus\":\"pci.3\",\"addr\":\"0x0\",\"drive\":\"libvirt-2-format\",\"id\":\"virtio-disk0\",\"bootindex\":1,\"write-cache\":\"on\"}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/69c40217-ae22-4704-ad01-f2ca06c42d58/disk.config\",\"node-name\":\"libvirt-1-storage\",\"read-only\":true,\"cache\":{\"direct\":true,\"no-flush\":false}}" -device 
"{\"driver\":\"ide-cd\",\"bus\":\"ide.0\",\"drive\":\"libvirt-1-storage\",\"id\":\"sata0-0-0\",\"write-cache\":\"on\"}" -netdev "{\"type\":\"tap\",\"fd\":\"39\",\"vhost\":true,\"vhostfd\":\"41\",\"id\":\"hostnet0\"}" -device "{\"driver\":\"virtio-net-pci\",\"rx_queue_size\":512,\"host_mtu\":1342,\"netdev\":\"hostnet0\",\"id\":\"net0\",\"mac\":\"fa:16:3e:d1:43:c6\",\"bus\":\"pci.2\",\"addr\":\"0x0\"}" -add-fd set=0,fd=37,opaque=serial0-log -chardev pty,id=charserial0,logfile=/dev/fdset/0,logappend=on -device "{\"driver\":\"isa-serial\",\"chardev\":\"charserial0\",\"id\":\"serial0\",\"index\":0}" -device "{\"driver\":\"usb-tablet\",\"id\":\"input0\",\"bus\":\"usb.0\",\"port\":\"1\"}" -audiodev "{\"id\":\"audio1\",\"driver\":\"none\"}" -object "{\"qom-type\":\"tls-creds-x509\",\"id\":\"vnc-tls-creds0\",\"dir\":\"/etc/pki/qemu\",\"endpoint\":\"server\",\"verify-peer\":true}" -vnc "[::0]:2,tls-creds=vnc-tls-creds0,audiodev=audio1" -device "{\"driver\":\"virtio-vga\",\"id\":\"video0\",\"max_outputs\":1,\"bus\":\"pcie.0\",\"addr\":\"0x1\"}" -global ICH9-LPC.noreboot=off -watchdog-action reset -device "{\"driver\":\"virtio-balloon-pci\",\"id\":\"balloon0\",\"bus\":\"pci.4\",\"addr\":\"0x0\"}" -object "{\"qom-type\":\"rng-random\",\"id\":\"objrng0\",\"filename\":\"/dev/urandom\"}" -device "{\"driver\":\"virtio-rng-pci\",\"rng\":\"objrng0\",\"id\":\"rng0\",\"bus\":\"pci.5\",\"addr\":\"0x0\"}" -device "{\"driver\":\"vmcoreinfo\"}" -sandbox on,obsolete=deny,elevateprivileges=deny,spawn=deny,resourcecontrol=deny -msg timestamp=on

Jan 30 17:59:43 compute-0 systemd[1]: Started Virtual Machine qemu-10-instance-0000000f.

● machine-qemu\x2d9\x2dinstance\x2d0000000d.scope - Virtual Machine qemu-9-instance-0000000d
     Loaded: loaded (/run/systemd/transient/machine-qemu\x2d9\x2dinstance\x2d0000000d.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 17:59:38 UTC; 2h 21min ago
         IO: 48.4G read, 712.2M written
      Tasks: 21 (limit: 16384)
     Memory: 1.2G (peak: 1.3G)
        CPU: 2h 22min 43.705s
     CGroup: /machine.slice/machine-qemu\x2d9\x2dinstance\x2d0000000d.scope
             └─libvirt
               └─216871 /usr/libexec/qemu-kvm -name guest=instance-0000000d,debug-threads=on -S -object "{\"qom-type\":\"secret\",\"id\":\"masterKey0\",\"format\":\"raw\",\"file\":\"/var/lib/libvirt/qemu/domain-9-instance-0000000d/master-key.aes\"}" -machine pc-q35-rhel9.8.0,usb=off,dump-guest-core=off,memory-backend=pc.ram,hpet=off,acpi=on -accel kvm -cpu EPYC-Rome,x2apic=on,tsc-deadline=on,hypervisor=on,tsc-adjust=on,spec-ctrl=on,stibp=on,ssbd=on,cmp-legacy=on,overflow-recov=on,succor=on,ibrs=on,amd-ssbd=on,virt-ssbd=on,lbrv=on,tsc-scale=on,vmcb-clean=on,flushbyasid=on,pause-filter=on,pfthreshold=on,svme-addr-chk=on,lfence-always-serializing=on,xsaves=off -m size=1048576k -object "{\"qom-type\":\"memory-backend-ram\",\"id\":\"pc.ram\",\"size\":1073741824}" -overcommit mem-lock=off -smp 1,sockets=1,dies=1,clusters=1,cores=1,threads=1 -uuid 639167ea-9de1-4930-b106-5a6f4a1a260d -smbios "type=1,manufacturer=RDO,product=OpenStack Compute,version=27.5.2-0.20260127144738.eaa65f0.el9,serial=639167ea-9de1-4930-b106-5a6f4a1a260d,uuid=639167ea-9de1-4930-b106-5a6f4a1a260d,family=Virtual Machine" -no-user-config -nodefaults -chardev socket,id=charmonitor,fd=26,server=on,wait=off -mon chardev=charmonitor,id=monitor,mode=control -rtc base=utc,driftfix=slew -global kvm-pit.lost_tick_policy=delay -no-shutdown -boot strict=on -device "{\"driver\":\"pcie-root-port\",\"port\":16,\"chassis\":1,\"id\":\"pci.1\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":17,\"chassis\":2,\"id\":\"pci.2\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":18,\"chassis\":3,\"id\":\"pci.3\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":19,\"chassis\":4,\"id\":\"pci.4\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":20,\"chassis\":5,\"id\":\"pci.5\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x4\"}" -device 
"{\"driver\":\"pcie-root-port\",\"port\":21,\"chassis\":6,\"id\":\"pci.6\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":22,\"chassis\":7,\"id\":\"pci.7\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":23,\"chassis\":8,\"id\":\"pci.8\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":24,\"chassis\":9,\"id\":\"pci.9\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":25,\"chassis\":10,\"id\":\"pci.10\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":26,\"chassis\":11,\"id\":\"pci.11\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":27,\"chassis\":12,\"id\":\"pci.12\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":28,\"chassis\":13,\"id\":\"pci.13\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":29,\"chassis\":14,\"id\":\"pci.14\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":30,\"chassis\":15,\"id\":\"pci.15\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":31,\"chassis\":16,\"id\":\"pci.16\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":32,\"chassis\":17,\"id\":\"pci.17\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":33,\"chassis\":18,\"id\":\"pci.18\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":34,\"chassis\":19,\"id\":\"pci.19\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":35,\"chassis\":20,\"id\":\"pci.20\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x3\"}" -device 
"{\"driver\":\"pcie-root-port\",\"port\":36,\"chassis\":21,\"id\":\"pci.21\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":37,\"chassis\":22,\"id\":\"pci.22\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":38,\"chassis\":23,\"id\":\"pci.23\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":39,\"chassis\":24,\"id\":\"pci.24\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":40,\"chassis\":25,\"id\":\"pci.25\",\"bus\":\"pcie.0\",\"addr\":\"0x5\"}" -device "{\"driver\":\"pcie-pci-bridge\",\"id\":\"pci.26\",\"bus\":\"pci.1\",\"addr\":\"0x0\"}" -device "{\"driver\":\"piix3-usb-uhci\",\"id\":\"usb\",\"bus\":\"pci.26\",\"addr\":\"0x1\"}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/_base/bbb3d8d61ef6b68186f44149e3aba39e4a3bf32e\",\"node-name\":\"libvirt-3-storage\",\"read-only\":true,\"cache\":{\"direct\":true,\"no-flush\":false}}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/639167ea-9de1-4930-b106-5a6f4a1a260d/disk\",\"node-name\":\"libvirt-2-storage\",\"auto-read-only\":true,\"discard\":\"unmap\",\"cache\":{\"direct\":true,\"no-flush\":false}}" -blockdev "{\"node-name\":\"libvirt-2-format\",\"read-only\":false,\"cache\":{\"direct\":true,\"no-flush\":false},\"driver\":\"qcow2\",\"file\":\"libvirt-2-storage\",\"backing\":\"libvirt-3-storage\"}" -device "{\"driver\":\"virtio-blk-pci\",\"bus\":\"pci.3\",\"addr\":\"0x0\",\"drive\":\"libvirt-2-format\",\"id\":\"virtio-disk0\",\"bootindex\":1,\"write-cache\":\"on\"}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/639167ea-9de1-4930-b106-5a6f4a1a260d/disk.config\",\"node-name\":\"libvirt-1-storage\",\"read-only\":true,\"cache\":{\"direct\":true,\"no-flush\":false}}" -device 
"{\"driver\":\"ide-cd\",\"bus\":\"ide.0\",\"drive\":\"libvirt-1-storage\",\"id\":\"sata0-0-0\",\"write-cache\":\"on\"}" -netdev "{\"type\":\"tap\",\"fd\":\"31\",\"vhost\":true,\"vhostfd\":\"36\",\"id\":\"hostnet0\"}" -device "{\"driver\":\"virtio-net-pci\",\"rx_queue_size\":512,\"host_mtu\":1342,\"netdev\":\"hostnet0\",\"id\":\"net0\",\"mac\":\"fa:16:3e:72:3c:9d\",\"bus\":\"pci.2\",\"addr\":\"0x0\"}" -add-fd set=0,fd=29,opaque=serial0-log -chardev pty,id=charserial0,logfile=/dev/fdset/0,logappend=on -device "{\"driver\":\"isa-serial\",\"chardev\":\"charserial0\",\"id\":\"serial0\",\"index\":0}" -device "{\"driver\":\"usb-tablet\",\"id\":\"input0\",\"bus\":\"usb.0\",\"port\":\"1\"}" -audiodev "{\"id\":\"audio1\",\"driver\":\"none\"}" -object "{\"qom-type\":\"tls-creds-x509\",\"id\":\"vnc-tls-creds0\",\"dir\":\"/etc/pki/qemu\",\"endpoint\":\"server\",\"verify-peer\":true}" -vnc "[::0]:1,tls-creds=vnc-tls-creds0,audiodev=audio1" -device "{\"driver\":\"virtio-vga\",\"id\":\"video0\",\"max_outputs\":1,\"bus\":\"pcie.0\",\"addr\":\"0x1\"}" -global ICH9-LPC.noreboot=off -watchdog-action reset -device "{\"driver\":\"virtio-balloon-pci\",\"id\":\"balloon0\",\"bus\":\"pci.4\",\"addr\":\"0x0\"}" -object "{\"qom-type\":\"rng-random\",\"id\":\"objrng0\",\"filename\":\"/dev/urandom\"}" -device "{\"driver\":\"virtio-rng-pci\",\"rng\":\"objrng0\",\"id\":\"rng0\",\"bus\":\"pci.5\",\"addr\":\"0x0\"}" -device "{\"driver\":\"vmcoreinfo\"}" -sandbox on,obsolete=deny,elevateprivileges=deny,spawn=deny,resourcecontrol=deny -msg timestamp=on

Jan 30 17:59:38 compute-0 systemd[1]: Started Virtual Machine qemu-9-instance-0000000d.

● session-1.scope - Session 1 of User zuul
     Loaded: loaded (/run/systemd/transient/session-1.scope; transient)
  Transient: yes
     Active: active (abandoned) since Fri 2026-01-30 16:50:03 UTC; 3h 31min ago
         IO: 0B read, 0B written
      Tasks: 1
     Memory: 17.1M (peak: 38.8M)
        CPU: 1min 15.441s
     CGroup: /user.slice/user-1000.slice/session-1.scope
             └─4517 /usr/bin/python3

Jan 30 16:52:00 np0005602930.novalocal python3[7135]: ansible-ansible.legacy.copy Invoked with src=/home/zuul/.ansible/tmp/ansible-tmp-1769791920.0552924-103-261349914731947/source dest=/etc/NetworkManager/system-connections/ci-private-network.nmconnection mode=0600 owner=root group=root follow=False _original_basename=bootstrap-ci-network-nm-connection.nmconnection.j2 checksum=109ec7d6cdc7b13a27b9eec944ea9f04b1a27f33 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 30 16:52:00 np0005602930.novalocal sudo[7133]: pam_unix(sudo:session): session closed for user root
Jan 30 16:52:01 np0005602930.novalocal sudo[7183]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-vdompbrwzadekxdxnlktbfqnqdlvofho ; OS_CLOUD=vexxhost /usr/bin/python3'
Jan 30 16:52:01 np0005602930.novalocal sudo[7183]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 16:52:01 np0005602930.novalocal python3[7185]: ansible-ansible.builtin.systemd Invoked with name=NetworkManager state=restarted daemon_reload=False daemon_reexec=False scope=system no_block=False enabled=None force=None masked=None
Jan 30 16:52:01 np0005602930.novalocal sudo[7183]: pam_unix(sudo:session): session closed for user root
Jan 30 16:52:01 np0005602930.novalocal python3[7269]: ansible-ansible.legacy.command Invoked with _raw_params=ip route zuul_log_id=fa163ef9-e89a-ad5c-c39b-0000000000b2-0-controller zuul_ansible_split_streams=False _uses_shell=False warn=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 30 16:53:01 np0005602930.novalocal sshd-session[4316]: Received disconnect from 38.102.83.114 port 49486:11: disconnected by user
Jan 30 16:53:01 np0005602930.novalocal sshd-session[4316]: Disconnected from user zuul 38.102.83.114 port 49486
Jan 30 16:53:01 np0005602930.novalocal sshd-session[4303]: pam_unix(sshd:session): session closed for user zuul

● session-100.scope - Session 100 of User zuul
     Loaded: loaded (/run/systemd/transient/session-100.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:24:05 UTC; 57min ago
         IO: 0B read, 8.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.8M)
        CPU: 67ms
     CGroup: /user.slice/user-1000.slice/session-100.scope
             ├─245026 "sshd-session: zuul [priv]"
             └─245029 "sshd-session: zuul@notty"

Jan 30 19:24:05 compute-0 systemd[1]: Started Session 100 of User zuul.
Jan 30 19:24:05 compute-0 sudo[245059]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni genev_sys_6081 'icmp and ((ether host fa:16:3e:f6:73:e9 and ether host fa:16:3e:4f:2c:b2) or (ether host fa:16:3e:67:59:a6 and ether host fa:16:3e:0f:28:e8))' -w /tmp/tmp.IY3gEBCT4k
Jan 30 19:24:05 compute-0 sudo[245059]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:26:05 compute-0 sudo[245059]: pam_unix(sudo:session): session closed for user root

● session-102.scope - Session 102 of User zuul
     Loaded: loaded (/run/systemd/transient/session-102.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:24:14 UTC; 57min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 4.0M)
        CPU: 60ms
     CGroup: /user.slice/user-1000.slice/session-102.scope
             ├─245122 "sshd-session: zuul [priv]"
             └─245125 "sshd-session: zuul@notty"

Jan 30 19:24:14 compute-0 systemd[1]: Started Session 102 of User zuul.
Jan 30 19:24:14 compute-0 sudo[245126]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.IY3gEBCT4k
Jan 30 19:24:14 compute-0 sudo[245126]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:24:14 compute-0 sudo[245126]: pam_unix(sudo:session): session closed for user root

● session-103.scope - Session 103 of User zuul
     Loaded: loaded (/run/systemd/transient/session-103.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:24:40 UTC; 56min ago
         IO: 0B read, 4.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.8M)
        CPU: 62ms
     CGroup: /user.slice/user-1000.slice/session-103.scope
             ├─245415 "sshd-session: zuul [priv]"
             └─245418 "sshd-session: zuul@notty"

Jan 30 19:24:40 compute-0 systemd[1]: Started Session 103 of User zuul.
Jan 30 19:24:40 compute-0 sudo[245448]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni eth1 icmp and ether host fa:16:3e:4e:e9:19 -w /tmp/tmp.hOaUHBJ3bA
Jan 30 19:24:40 compute-0 sudo[245448]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:26:40 compute-0 sudo[245448]: pam_unix(sudo:session): session closed for user root

● session-105.scope - Session 105 of User zuul
     Loaded: loaded (/run/systemd/transient/session-105.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:24:49 UTC; 56min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 4.0M)
        CPU: 69ms
     CGroup: /user.slice/user-1000.slice/session-105.scope
             ├─245517 "sshd-session: zuul [priv]"
             └─245520 "sshd-session: zuul@notty"

Jan 30 19:24:49 compute-0 systemd[1]: Started Session 105 of User zuul.
Jan 30 19:24:49 compute-0 sudo[245521]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.hOaUHBJ3bA
Jan 30 19:24:49 compute-0 sudo[245521]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:24:49 compute-0 sudo[245521]: pam_unix(sudo:session): session closed for user root

● session-108.scope - Session 108 of User zuul
     Loaded: loaded (/run/systemd/transient/session-108.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:25:12 UTC; 56min ago
         IO: 0B read, 4.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.7M)
        CPU: 92ms
     CGroup: /user.slice/user-1000.slice/session-108.scope
             ├─245810 "sshd-session: zuul [priv]"
             └─245829 "sshd-session: zuul@notty"

Jan 30 19:25:12 compute-0 systemd[1]: Started Session 108 of User zuul.
Jan 30 19:25:13 compute-0 sudo[245882]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni eth1 icmp and ether host fa:16:3e:a8:68:85 -w /tmp/tmp.awEZtS4ulL
Jan 30 19:25:13 compute-0 sudo[245882]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:27:13 compute-0 sudo[245882]: pam_unix(sudo:session): session closed for user root

● session-110.scope - Session 110 of User zuul
     Loaded: loaded (/run/systemd/transient/session-110.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:25:22 UTC; 55min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 3.7M)
        CPU: 56ms
     CGroup: /user.slice/user-1000.slice/session-110.scope
             ├─245913 "sshd-session: zuul [priv]"
             └─245929 "sshd-session: zuul@notty"

Jan 30 19:25:22 compute-0 systemd[1]: Started Session 110 of User zuul.
Jan 30 19:25:22 compute-0 sudo[245954]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.awEZtS4ulL
Jan 30 19:25:22 compute-0 sudo[245954]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:25:22 compute-0 sudo[245954]: pam_unix(sudo:session): session closed for user root

● session-111.scope - Session 111 of User zuul
     Loaded: loaded (/run/systemd/transient/session-111.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:25:24 UTC; 55min ago
         IO: 0B read, 4.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.8M)
        CPU: 58ms
     CGroup: /user.slice/user-1000.slice/session-111.scope
             ├─245989 "sshd-session: zuul [priv]"
             └─245992 "sshd-session: zuul@notty"

Jan 30 19:25:24 compute-0 systemd[1]: Started Session 111 of User zuul.
Jan 30 19:25:24 compute-0 sudo[246022]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni genev_sys_6081 'icmp and ((ether host fa:16:3e:f6:73:e9 and ether host fa:16:3e:4f:2c:b2) or (ether host fa:16:3e:67:59:a6 and ether host fa:16:3e:0f:28:e8))' -w /tmp/tmp.OO2SEBwfZW
Jan 30 19:25:24 compute-0 sudo[246022]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:27:24 compute-0 sudo[246022]: pam_unix(sudo:session): session closed for user root

● session-113.scope - Session 113 of User zuul
     Loaded: loaded (/run/systemd/transient/session-113.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:25:33 UTC; 55min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 4.1M)
        CPU: 54ms
     CGroup: /user.slice/user-1000.slice/session-113.scope
             ├─246092 "sshd-session: zuul [priv]"
             └─246095 "sshd-session: zuul@notty"

Jan 30 19:25:33 compute-0 systemd[1]: Started Session 113 of User zuul.
Jan 30 19:25:33 compute-0 sudo[246096]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.OO2SEBwfZW
Jan 30 19:25:33 compute-0 sudo[246096]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:25:33 compute-0 sudo[246096]: pam_unix(sudo:session): session closed for user root

● session-120.scope - Session 120 of User zuul
     Loaded: loaded (/run/systemd/transient/session-120.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:27:13 UTC; 54min ago
         IO: 0B read, 12.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.7M)
        CPU: 73ms
     CGroup: /user.slice/user-1000.slice/session-120.scope
             ├─246976 "sshd-session: zuul [priv]"
             └─246979 "sshd-session: zuul@notty"

Jan 30 19:27:13 compute-0 systemd[1]: Started Session 120 of User zuul.
Jan 30 19:27:13 compute-0 sudo[247009]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni eth1 icmp and ether host fa:16:3e:aa:8e:98 -w /tmp/tmp.CXmyebM9Eu
Jan 30 19:27:13 compute-0 sudo[247009]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)

● session-122.scope - Session 122 of User zuul
     Loaded: loaded (/run/systemd/transient/session-122.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:27:35 UTC; 53min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 3.8M)
        CPU: 79ms
     CGroup: /user.slice/user-1000.slice/session-122.scope
             ├─247166 "sshd-session: zuul [priv]"
             └─247169 "sshd-session: zuul@notty"

Jan 30 19:27:35 compute-0 systemd[1]: Started Session 122 of User zuul.
Jan 30 19:27:35 compute-0 sudo[247170]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.CXmyebM9Eu
Jan 30 19:27:35 compute-0 sudo[247170]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:27:35 compute-0 sudo[247170]: pam_unix(sudo:session): session closed for user root

● session-124.scope - Session 124 of User zuul
     Loaded: loaded (/run/systemd/transient/session-124.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:28:20 UTC; 52min ago
         IO: 0B read, 8.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.8M)
        CPU: 62ms
     CGroup: /user.slice/user-1000.slice/session-124.scope
             ├─247625 "sshd-session: zuul [priv]"
             └─247628 "sshd-session: zuul@notty"

Jan 30 19:28:20 compute-0 systemd[1]: Started Session 124 of User zuul.
Jan 30 19:28:20 compute-0 sudo[247658]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni eth1 icmp and ether host fa:16:3e:09:cc:a5 -w /tmp/tmp.ZDG9ul3muD
Jan 30 19:28:20 compute-0 sudo[247658]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:30:20 compute-0 sudo[247658]: pam_unix(sudo:session): session closed for user root

● session-126.scope - Session 126 of User zuul
     Loaded: loaded (/run/systemd/transient/session-126.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:28:42 UTC; 52min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 3.9M)
        CPU: 53ms
     CGroup: /user.slice/user-1000.slice/session-126.scope
             ├─247788 "sshd-session: zuul [priv]"
             └─247791 "sshd-session: zuul@notty"

Jan 30 19:28:42 compute-0 systemd[1]: Started Session 126 of User zuul.
Jan 30 19:28:42 compute-0 sudo[247792]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.ZDG9ul3muD
Jan 30 19:28:42 compute-0 sudo[247792]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:28:42 compute-0 sudo[247792]: pam_unix(sudo:session): session closed for user root

● session-128.scope - Session 128 of User zuul
     Loaded: loaded (/run/systemd/transient/session-128.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:29:31 UTC; 51min ago
         IO: 0B read, 4.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.8M)
        CPU: 60ms
     CGroup: /user.slice/user-1000.slice/session-128.scope
             ├─248316 "sshd-session: zuul [priv]"
             └─248319 "sshd-session: zuul@notty"

Jan 30 19:29:31 compute-0 systemd[1]: Started Session 128 of User zuul.
Jan 30 19:29:31 compute-0 sudo[248349]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni eth1 icmp and ether host fa:16:3e:5e:d4:c0 -w /tmp/tmp.ozjGq4iD10
Jan 30 19:29:31 compute-0 sudo[248349]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:31:31 compute-0 sudo[248349]: pam_unix(sudo:session): session closed for user root

● session-130.scope - Session 130 of User zuul
     Loaded: loaded (/run/systemd/transient/session-130.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:29:40 UTC; 51min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 3.8M)
        CPU: 57ms
     CGroup: /user.slice/user-1000.slice/session-130.scope
             ├─248418 "sshd-session: zuul [priv]"
             └─248421 "sshd-session: zuul@notty"

Jan 30 19:29:40 compute-0 systemd[1]: Started Session 130 of User zuul.
Jan 30 19:29:40 compute-0 sudo[248422]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.ozjGq4iD10
Jan 30 19:29:40 compute-0 sudo[248422]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:29:40 compute-0 sudo[248422]: pam_unix(sudo:session): session closed for user root

● session-131.scope - Session 131 of User zuul
     Loaded: loaded (/run/systemd/transient/session-131.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:29:50 UTC; 51min ago
         IO: 0B read, 16.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.7M)
        CPU: 73ms
     CGroup: /user.slice/user-1000.slice/session-131.scope
             ├─248494 "sshd-session: zuul [priv]"
             └─248497 "sshd-session: zuul@notty"

Jan 30 19:29:50 compute-0 systemd[1]: Started Session 131 of User zuul.
Jan 30 19:29:50 compute-0 sudo[248527]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni eth1 icmp and ether host fa:16:3e:7b:15:c5 -w /tmp/tmp.flInWUJXAw
Jan 30 19:29:50 compute-0 sudo[248527]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:31:50 compute-0 sudo[248527]: pam_unix(sudo:session): session closed for user root

● session-133.scope - Session 133 of User zuul
     Loaded: loaded (/run/systemd/transient/session-133.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:29:59 UTC; 51min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 3.9M)
        CPU: 52ms
     CGroup: /user.slice/user-1000.slice/session-133.scope
             ├─248553 "sshd-session: zuul [priv]"
             └─248568 "sshd-session: zuul@notty"

Jan 30 19:29:59 compute-0 systemd[1]: Started Session 133 of User zuul.
Jan 30 19:29:59 compute-0 sudo[248602]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.flInWUJXAw
Jan 30 19:29:59 compute-0 sudo[248602]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:29:59 compute-0 sudo[248602]: pam_unix(sudo:session): session closed for user root

● session-134.scope - Session 134 of User zuul
     Loaded: loaded (/run/systemd/transient/session-134.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:30:09 UTC; 51min ago
         IO: 0B read, 12.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.7M)
        CPU: 68ms
     CGroup: /user.slice/user-1000.slice/session-134.scope
             ├─248856 "sshd-session: zuul [priv]"
             └─248883 "sshd-session: zuul@notty"

Jan 30 19:30:09 compute-0 systemd[1]: Started Session 134 of User zuul.
Jan 30 19:30:09 compute-0 sudo[248929]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni eth1 icmp and ether host fa:16:3e:7b:15:c5 -w /tmp/tmp.FzAYcBndTQ
Jan 30 19:30:09 compute-0 sudo[248929]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:32:09 compute-0 sudo[248929]: pam_unix(sudo:session): session closed for user root

● session-136.scope - Session 136 of User zuul
     Loaded: loaded (/run/systemd/transient/session-136.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:30:31 UTC; 50min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 4.3M)
        CPU: 56ms
     CGroup: /user.slice/user-1000.slice/session-136.scope
             ├─249050 "sshd-session: zuul [priv]"
             └─249053 "sshd-session: zuul@notty"

Jan 30 19:30:31 compute-0 systemd[1]: Started Session 136 of User zuul.
Jan 30 19:30:31 compute-0 sudo[249054]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.FzAYcBndTQ
Jan 30 19:30:31 compute-0 sudo[249054]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:30:31 compute-0 sudo[249054]: pam_unix(sudo:session): session closed for user root

● session-158.scope - Session 158 of User zuul
     Loaded: loaded (/run/systemd/transient/session-158.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 20:20:56 UTC; 22s ago
         IO: 102.3M read, 41.7M written
      Tasks: 19
     Memory: 668.6M (peak: 717.9M)
        CPU: 54.090s
     CGroup: /user.slice/user-1000.slice/session-158.scope
             ├─264277 "sshd-session: zuul [priv]"
             ├─264280 "sshd-session: zuul@notty"
             ├─264281 sudo bash -c "rm -rf /var/tmp/sos-osp && mkdir /var/tmp/sos-osp && sos report --batch --all-logs --tmp-dir=/var/tmp/sos-osp  -p container,openstack_edpm,system,storage,virt"
             ├─264314 /usr/bin/python3 -s /sbin/sos report --batch --all-logs --tmp-dir=/var/tmp/sos-osp -p container,openstack_edpm,system,storage,virt
             ├─267249 timeout 15s turbostat --debug sleep 10
             ├─267669 timeout 300s systemctl status --all
             ├─267670 systemctl status --all
             ├─267696 timeout 300s semanage node -l
             ├─267698 /usr/bin/python3 -EsI /usr/sbin/semanage node -l
             ├─267706 timeout --foreground 300s virsh -r dominfo instance-0000000d
             └─267707 virsh -r dominfo instance-0000000d

Jan 30 20:20:56 compute-0 systemd[1]: Started Session 158 of User zuul.
Jan 30 20:20:56 compute-0 sudo[264281]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/bash -c 'rm -rf /var/tmp/sos-osp && mkdir /var/tmp/sos-osp && sos report --batch --all-logs --tmp-dir=/var/tmp/sos-osp  -p container,openstack_edpm,system,storage,virt'
Jan 30 20:20:56 compute-0 sudo[264281]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 20:21:00 compute-0 ovs-vsctl[264494]: ovs|00001|db_ctl_base|ERR|no key "dpdk-init" in Open_vSwitch record "." column other_config

● session-48.scope - Session 48 of User zuul
     Loaded: loaded (/run/systemd/transient/session-48.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:16:52 UTC; 1h 4min ago
         IO: 1.4M read, 4.0K written
      Tasks: 2
     Memory: 2.5M (peak: 7.2M)
        CPU: 62ms
     CGroup: /user.slice/user-1000.slice/session-48.scope
             ├─240767 "sshd-session: zuul [priv]"
             └─240770 "sshd-session: zuul@notty"

Jan 30 19:16:52 compute-0 systemd[1]: Started Session 48 of User zuul.
Jan 30 19:16:52 compute-0 sudo[240800]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni eth1 icmp and ether host fa:16:3e:04:31:06 -w /tmp/tmp.Psc1AtLmGJ
Jan 30 19:16:52 compute-0 sudo[240800]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:18:52 compute-0 sudo[240800]: pam_unix(sudo:session): session closed for user root

● session-50.scope - Session 50 of User zuul
     Loaded: loaded (/run/systemd/transient/session-50.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:17:14 UTC; 1h 4min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 3.9M)
        CPU: 61ms
     CGroup: /user.slice/user-1000.slice/session-50.scope
             ├─240930 "sshd-session: zuul [priv]"
             └─240933 "sshd-session: zuul@notty"

Jan 30 19:17:14 compute-0 systemd[1]: Started Session 50 of User zuul.
Jan 30 19:17:14 compute-0 sudo[240934]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.Psc1AtLmGJ
Jan 30 19:17:14 compute-0 sudo[240934]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:17:14 compute-0 sudo[240934]: pam_unix(sudo:session): session closed for user root

● session-51.scope - Session 51 of User zuul
     Loaded: loaded (/run/systemd/transient/session-51.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:17:22 UTC; 1h 3min ago
         IO: 0B read, 4.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.8M)
        CPU: 76ms
     CGroup: /user.slice/user-1000.slice/session-51.scope
             ├─241003 "sshd-session: zuul [priv]"
             └─241006 "sshd-session: zuul@notty"

Jan 30 19:17:22 compute-0 systemd[1]: Started Session 51 of User zuul.
Jan 30 19:17:22 compute-0 sudo[241036]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni eth1 icmp and ether host fa:16:3e:0f:37:cf -w /tmp/tmp.mrWy1qlSUf
Jan 30 19:17:22 compute-0 sudo[241036]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:19:22 compute-0 sudo[241036]: pam_unix(sudo:session): session closed for user root

● session-53.scope - Session 53 of User zuul
     Loaded: loaded (/run/systemd/transient/session-53.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:17:31 UTC; 1h 3min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 3.7M)
        CPU: 52ms
     CGroup: /user.slice/user-1000.slice/session-53.scope
             ├─241063 "sshd-session: zuul [priv]"
             └─241091 "sshd-session: zuul@notty"

Jan 30 19:17:31 compute-0 systemd[1]: Started Session 53 of User zuul.
Jan 30 19:17:31 compute-0 sudo[241109]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.mrWy1qlSUf
Jan 30 19:17:31 compute-0 sudo[241109]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:17:31 compute-0 sudo[241109]: pam_unix(sudo:session): session closed for user root

● session-54.scope - Session 54 of User zuul
     Loaded: loaded (/run/systemd/transient/session-54.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:17:39 UTC; 1h 3min ago
         IO: 0B read, 4.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.8M)
        CPU: 66ms
     CGroup: /user.slice/user-1000.slice/session-54.scope
             ├─241146 "sshd-session: zuul [priv]"
             └─241149 "sshd-session: zuul@notty"

Jan 30 19:17:39 compute-0 systemd[1]: Started Session 54 of User zuul.
Jan 30 19:17:40 compute-0 sudo[241226]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni eth1 icmp and ether host fa:16:3e:04:31:06 -w /tmp/tmp.z5AchxGv6Q
Jan 30 19:17:40 compute-0 sudo[241226]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:19:40 compute-0 sudo[241226]: pam_unix(sudo:session): session closed for user root

● session-56.scope - Session 56 of User zuul
     Loaded: loaded (/run/systemd/transient/session-56.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:17:49 UTC; 1h 3min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 3.9M)
        CPU: 48ms
     CGroup: /user.slice/user-1000.slice/session-56.scope
             ├─241295 "sshd-session: zuul [priv]"
             └─241298 "sshd-session: zuul@notty"

Jan 30 19:17:49 compute-0 systemd[1]: Started Session 56 of User zuul.
Jan 30 19:17:49 compute-0 sudo[241299]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.z5AchxGv6Q
Jan 30 19:17:49 compute-0 sudo[241299]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:17:49 compute-0 sudo[241299]: pam_unix(sudo:session): session closed for user root

● session-57.scope - Session 57 of User zuul
     Loaded: loaded (/run/systemd/transient/session-57.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:17:56 UTC; 1h 3min ago
         IO: 0B read, 4.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.7M)
        CPU: 61ms
     CGroup: /user.slice/user-1000.slice/session-57.scope
             ├─241344 "sshd-session: zuul [priv]"
             └─241347 "sshd-session: zuul@notty"

Jan 30 19:17:56 compute-0 systemd[1]: Started Session 57 of User zuul.
Jan 30 19:17:56 compute-0 sudo[241377]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni eth1 icmp and ether host fa:16:3e:d1:5a:46 -w /tmp/tmp.8gMOFzUX7A
Jan 30 19:17:56 compute-0 sudo[241377]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:19:56 compute-0 sudo[241377]: pam_unix(sudo:session): session closed for user root

● session-59.scope - Session 59 of User zuul
     Loaded: loaded (/run/systemd/transient/session-59.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:18:05 UTC; 1h 3min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 4.0M)
        CPU: 52ms
     CGroup: /user.slice/user-1000.slice/session-59.scope
             ├─241444 "sshd-session: zuul [priv]"
             └─241447 "sshd-session: zuul@notty"

Jan 30 19:18:05 compute-0 systemd[1]: Started Session 59 of User zuul.
Jan 30 19:18:05 compute-0 sudo[241448]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.8gMOFzUX7A
Jan 30 19:18:05 compute-0 sudo[241448]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:18:05 compute-0 sudo[241448]: pam_unix(sudo:session): session closed for user root

● session-60.scope - Session 60 of User zuul
     Loaded: loaded (/run/systemd/transient/session-60.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:18:13 UTC; 1h 3min ago
         IO: 0B read, 4.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.8M)
        CPU: 81ms
     CGroup: /user.slice/user-1000.slice/session-60.scope
             ├─241519 "sshd-session: zuul [priv]"
             └─241522 "sshd-session: zuul@notty"

Jan 30 19:18:13 compute-0 systemd[1]: Started Session 60 of User zuul.
Jan 30 19:18:14 compute-0 sudo[241552]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni eth1 icmp and ether host fa:16:3e:d1:5a:46 -w /tmp/tmp.3bkh7rFkPL
Jan 30 19:18:14 compute-0 sudo[241552]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:20:14 compute-0 sudo[241552]: pam_unix(sudo:session): session closed for user root

● session-62.scope - Session 62 of User zuul
     Loaded: loaded (/run/systemd/transient/session-62.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:18:35 UTC; 1h 2min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 4.1M)
        CPU: 47ms
     CGroup: /user.slice/user-1000.slice/session-62.scope
             ├─241662 "sshd-session: zuul [priv]"
             └─241665 "sshd-session: zuul@notty"

Jan 30 19:18:35 compute-0 systemd[1]: Started Session 62 of User zuul.
Jan 30 19:18:36 compute-0 sudo[241666]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.3bkh7rFkPL
Jan 30 19:18:36 compute-0 sudo[241666]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:18:36 compute-0 sudo[241666]: pam_unix(sudo:session): session closed for user root

● session-68.scope - Session 68 of User zuul
     Loaded: loaded (/run/systemd/transient/session-68.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:20:05 UTC; 1h 1min ago
         IO: 0B read, 4.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.7M)
        CPU: 78ms
     CGroup: /user.slice/user-1000.slice/session-68.scope
             ├─242466 "sshd-session: zuul [priv]"
             └─242469 "sshd-session: zuul@notty"

Jan 30 19:20:05 compute-0 systemd[1]: Started Session 68 of User zuul.
Jan 30 19:20:05 compute-0 sudo[242499]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni eth1 icmp and ether host fa:16:3e:bb:2f:ea -w /tmp/tmp.so4SAnqDFQ
Jan 30 19:20:05 compute-0 sudo[242499]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)

● session-70.scope - Session 70 of User zuul
     Loaded: loaded (/run/systemd/transient/session-70.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:20:14 UTC; 1h 1min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 3.9M)
        CPU: 52ms
     CGroup: /user.slice/user-1000.slice/session-70.scope
             ├─242564 "sshd-session: zuul [priv]"
             └─242586 "sshd-session: zuul@notty"

Jan 30 19:20:14 compute-0 systemd[1]: Started Session 70 of User zuul.
Jan 30 19:20:14 compute-0 sudo[242612]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.so4SAnqDFQ
Jan 30 19:20:14 compute-0 sudo[242612]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:20:14 compute-0 sudo[242612]: pam_unix(sudo:session): session closed for user root

● session-71.scope - Session 71 of User zuul
     Loaded: loaded (/run/systemd/transient/session-71.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:20:15 UTC; 1h 1min ago
         IO: 0B read, 8.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.7M)
        CPU: 56ms
     CGroup: /user.slice/user-1000.slice/session-71.scope
             ├─242638 "sshd-session: zuul [priv]"
             └─242641 "sshd-session: zuul@notty"

Jan 30 19:20:15 compute-0 systemd[1]: Started Session 71 of User zuul.
Jan 30 19:20:15 compute-0 sudo[242671]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni eth1 icmp and ether host fa:16:3e:07:fa:b1 -w /tmp/tmp.RrRvUsbvLo
Jan 30 19:20:15 compute-0 sudo[242671]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)

● session-73.scope - Session 73 of User zuul
     Loaded: loaded (/run/systemd/transient/session-73.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:20:25 UTC; 1h 0min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 3.8M)
        CPU: 67ms
     CGroup: /user.slice/user-1000.slice/session-73.scope
             ├─242742 "sshd-session: zuul [priv]"
             └─242745 "sshd-session: zuul@notty"

Jan 30 19:20:25 compute-0 systemd[1]: Started Session 73 of User zuul.
Jan 30 19:20:25 compute-0 sudo[242746]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.RrRvUsbvLo
Jan 30 19:20:25 compute-0 sudo[242746]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:20:25 compute-0 sudo[242746]: pam_unix(sudo:session): session closed for user root

● session-74.scope - Session 74 of User zuul
     Loaded: loaded (/run/systemd/transient/session-74.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:20:26 UTC; 1h 0min ago
         IO: 0B read, 8.0K written
      Tasks: 2
     Memory: 1.0M (peak: 6.0M)
        CPU: 57ms
     CGroup: /user.slice/user-1000.slice/session-74.scope
             ├─242772 "sshd-session: zuul [priv]"
             └─242775 "sshd-session: zuul@notty"

Jan 30 19:20:26 compute-0 systemd[1]: Started Session 74 of User zuul.
Jan 30 19:20:26 compute-0 sudo[242805]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni genev_sys_6081 icmp and ether host fa:16:3e:ea:8e:55 and ether host fa:16:3e:5e:32:2f -w /tmp/tmp.SxNmNQ7XLe
Jan 30 19:20:26 compute-0 sudo[242805]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:22:26 compute-0 sudo[242805]: pam_unix(sudo:session): session closed for user root

● session-76.scope - Session 76 of User zuul
     Loaded: loaded (/run/systemd/transient/session-76.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:20:35 UTC; 1h 0min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 3.9M)
        CPU: 56ms
     CGroup: /user.slice/user-1000.slice/session-76.scope
             ├─242832 "sshd-session: zuul [priv]"
             └─242835 "sshd-session: zuul@notty"

Jan 30 19:20:35 compute-0 systemd[1]: Started Session 76 of User zuul.
Jan 30 19:20:35 compute-0 sudo[242836]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.SxNmNQ7XLe
Jan 30 19:20:35 compute-0 sudo[242836]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:20:35 compute-0 sudo[242836]: pam_unix(sudo:session): session closed for user root

● session-79.scope - Session 79 of User zuul
     Loaded: loaded (/run/systemd/transient/session-79.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:21:17 UTC; 1h 0min ago
         IO: 0B read, 4.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.8M)
        CPU: 68ms
     CGroup: /user.slice/user-1000.slice/session-79.scope
             ├─243425 "sshd-session: zuul [priv]"
             └─243428 "sshd-session: zuul@notty"

Jan 30 19:21:17 compute-0 systemd[1]: Started Session 79 of User zuul.
Jan 30 19:21:17 compute-0 sudo[243458]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni eth1 icmp and ether host fa:16:3e:bb:2f:ea -w /tmp/tmp.3QnN0FAfto
Jan 30 19:21:17 compute-0 sudo[243458]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:23:17 compute-0 sudo[243458]: pam_unix(sudo:session): session closed for user root

● session-81.scope - Session 81 of User zuul
     Loaded: loaded (/run/systemd/transient/session-81.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:21:26 UTC; 59min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 3.8M)
        CPU: 52ms
     CGroup: /user.slice/user-1000.slice/session-81.scope
             ├─243524 "sshd-session: zuul [priv]"
             └─243527 "sshd-session: zuul@notty"

Jan 30 19:21:26 compute-0 systemd[1]: Started Session 81 of User zuul.
Jan 30 19:21:26 compute-0 sudo[243528]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.3QnN0FAfto
Jan 30 19:21:26 compute-0 sudo[243528]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:21:26 compute-0 sudo[243528]: pam_unix(sudo:session): session closed for user root

● session-82.scope - Session 82 of User zuul
     Loaded: loaded (/run/systemd/transient/session-82.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:21:27 UTC; 59min ago
         IO: 0B read, 4.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.7M)
        CPU: 60ms
     CGroup: /user.slice/user-1000.slice/session-82.scope
             ├─243554 "sshd-session: zuul [priv]"
             └─243557 "sshd-session: zuul@notty"

Jan 30 19:21:27 compute-0 systemd[1]: Started Session 82 of User zuul.
Jan 30 19:21:27 compute-0 sudo[243587]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni eth1 icmp and ether host fa:16:3e:07:fa:b1 -w /tmp/tmp.ToK4ohLhb3
Jan 30 19:21:27 compute-0 sudo[243587]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)

● session-84.scope - Session 84 of User zuul
     Loaded: loaded (/run/systemd/transient/session-84.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:21:37 UTC; 59min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 3.6M)
        CPU: 53ms
     CGroup: /user.slice/user-1000.slice/session-84.scope
             ├─243613 "sshd-session: zuul [priv]"
             └─243616 "sshd-session: zuul@notty"

Jan 30 19:21:37 compute-0 systemd[1]: Started Session 84 of User zuul.
Jan 30 19:21:37 compute-0 sudo[243617]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.ToK4ohLhb3
Jan 30 19:21:37 compute-0 sudo[243617]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:21:37 compute-0 sudo[243617]: pam_unix(sudo:session): session closed for user root

● session-85.scope - Session 85 of User zuul
     Loaded: loaded (/run/systemd/transient/session-85.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:21:38 UTC; 59min ago
         IO: 0B read, 4.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.7M)
        CPU: 56ms
     CGroup: /user.slice/user-1000.slice/session-85.scope
             ├─243643 "sshd-session: zuul [priv]"
             └─243668 "sshd-session: zuul@notty"

Jan 30 19:21:38 compute-0 systemd[1]: Started Session 85 of User zuul.
Jan 30 19:21:38 compute-0 sudo[243717]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni genev_sys_6081 icmp and ether host fa:16:3e:ea:8e:55 and ether host fa:16:3e:5e:32:2f -w /tmp/tmp.SZ2HwwYsUw
Jan 30 19:21:38 compute-0 sudo[243717]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:23:38 compute-0 sudo[243717]: pam_unix(sudo:session): session closed for user root

● session-87.scope - Session 87 of User zuul
     Loaded: loaded (/run/systemd/transient/session-87.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:21:47 UTC; 59min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 3.9M)
        CPU: 60ms
     CGroup: /user.slice/user-1000.slice/session-87.scope
             ├─243790 "sshd-session: zuul [priv]"
             └─243793 "sshd-session: zuul@notty"

Jan 30 19:21:47 compute-0 systemd[1]: Started Session 87 of User zuul.
Jan 30 19:21:47 compute-0 sudo[243794]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.SZ2HwwYsUw
Jan 30 19:21:47 compute-0 sudo[243794]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:21:47 compute-0 sudo[243794]: pam_unix(sudo:session): session closed for user root

● session-94.scope - Session 94 of User zuul
     Loaded: loaded (/run/systemd/transient/session-94.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:23:01 UTC; 58min ago
         IO: 0B read, 8.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.7M)
        CPU: 68ms
     CGroup: /user.slice/user-1000.slice/session-94.scope
             ├─244430 "sshd-session: zuul [priv]"
             └─244433 "sshd-session: zuul@notty"

Jan 30 19:23:01 compute-0 systemd[1]: Started Session 94 of User zuul.
Jan 30 19:23:01 compute-0 sudo[244463]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni eth1 icmp and ether host fa:16:3e:4e:e9:19 -w /tmp/tmp.ncKO2ywjPb
Jan 30 19:23:01 compute-0 sudo[244463]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)

● session-96.scope - Session 96 of User zuul
     Loaded: loaded (/run/systemd/transient/session-96.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:23:23 UTC; 57min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 3.7M)
        CPU: 53ms
     CGroup: /user.slice/user-1000.slice/session-96.scope
             ├─244575 "sshd-session: zuul [priv]"
             └─244578 "sshd-session: zuul@notty"

Jan 30 19:23:23 compute-0 systemd[1]: Started Session 96 of User zuul.
Jan 30 19:23:23 compute-0 sudo[244579]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.ncKO2ywjPb
Jan 30 19:23:23 compute-0 sudo[244579]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:23:23 compute-0 sudo[244579]: pam_unix(sudo:session): session closed for user root

● session-97.scope - Session 97 of User zuul
     Loaded: loaded (/run/systemd/transient/session-97.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:23:41 UTC; 57min ago
         IO: 0B read, 8.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.8M)
        CPU: 65ms
     CGroup: /user.slice/user-1000.slice/session-97.scope
             ├─244785 "sshd-session: zuul [priv]"
             └─244807 "sshd-session: zuul@notty"

Jan 30 19:23:41 compute-0 systemd[1]: Started Session 97 of User zuul.
Jan 30 19:23:41 compute-0 sudo[244856]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni eth1 icmp and ether host fa:16:3e:a8:68:85 -w /tmp/tmp.Wr31SpvoDO
Jan 30 19:23:41 compute-0 sudo[244856]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:25:41 compute-0 sudo[244856]: pam_unix(sudo:session): session closed for user root

● session-99.scope - Session 99 of User zuul
     Loaded: loaded (/run/systemd/transient/session-99.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:24:03 UTC; 57min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 3.8M)
        CPU: 64ms
     CGroup: /user.slice/user-1000.slice/session-99.scope
             ├─244996 "sshd-session: zuul [priv]"
             └─244999 "sshd-session: zuul@notty"

Jan 30 19:24:03 compute-0 systemd[1]: Started Session 99 of User zuul.
Jan 30 19:24:03 compute-0 sudo[245000]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.Wr31SpvoDO
Jan 30 19:24:03 compute-0 sudo[245000]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:24:03 compute-0 sudo[245000]: pam_unix(sudo:session): session closed for user root

○ 573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b-7930ac65c70ecc71.service - /usr/bin/podman healthcheck run 573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b
     Loaded: loaded (/run/systemd/transient/573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b-7930ac65c70ecc71.service; transient)
  Transient: yes
     Active: inactive (dead) since Fri 2026-01-30 20:20:57 UTC; 22s ago
   Duration: 117ms
TriggeredBy: ● 573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b-7930ac65c70ecc71.timer
    Process: 264305 ExecStart=/usr/bin/podman healthcheck run 573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b (code=exited, status=0/SUCCESS)
   Main PID: 264305 (code=exited, status=0/SUCCESS)
        CPU: 87ms

Jan 30 20:20:57 compute-0 podman[264305]: 2026-01-30 20:20:57.036122016 +0000 UTC m=+0.099685945 container health_status 573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=b85d0548925081ae8c6bdd697658cec4, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': 'fb0ba4ae2b01ec1faae83a80acf21c3ad3948d5c7b1e5820f87360e814103f4d-64c7807694e82e018314df3ffb373ed2487497944b057fc7720c3c54ea99ce8f-64c7807694e82e018314df3ffb373ed2487497944b057fc7720c3c54ea99ce8f-64c7807694e82e018314df3ffb373ed2487497944b057fc7720c3c54ea99ce8f-0823bd3e096c75f72e4a95820d41b0d4b6a1172bd2892ddb9f29b788a11bc87d'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/openstack/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', '/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', 
'/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']}, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, config_id=ovn_metadata_agent, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, tcib_managed=true, container_name=ovn_metadata_agent, org.label-schema.build-date=20260127)

○ 60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5-47b3e5005a023219.service - /usr/bin/podman healthcheck run 60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5
     Loaded: loaded (/run/systemd/transient/60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5-47b3e5005a023219.service; transient)
  Transient: yes
     Active: inactive (dead) since Fri 2026-01-30 20:21:15 UTC; 3s ago
   Duration: 65ms
TriggeredBy: ● 60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5-47b3e5005a023219.timer
    Process: 267255 ExecStart=/usr/bin/podman healthcheck run 60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5 (code=exited, status=0/SUCCESS)
   Main PID: 267255 (code=exited, status=0/SUCCESS)
        CPU: 54ms

Jan 30 20:21:15 compute-0 podman[267255]: 2026-01-30 20:21:15.360069772 +0000 UTC m=+0.049854988 container health_status 60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5 (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20260127, tcib_managed=true, config_data={'command': 'kolla_start', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal', 'EDPM_CONFIG_HASH': 'fb0ba4ae2b01ec1faae83a80acf21c3ad3948d5c7b1e5820f87360e814103f4d-64c7807694e82e018314df3ffb373ed2487497944b057fc7720c3c54ea99ce8f-4513b9ade86adc87d1a6c9416d7c3bf860314bfcf0b3a2bcdbd881f6906fc595-4513b9ade86adc87d1a6c9416d7c3bf860314bfcf0b3a2bcdbd881f6906fc595'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute', 'test': '/openstack/healthcheck compute'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'net': 'host', 'restart': 'always', 'security_opt': 'label:type:ceilometer_polling_t', 'user': 'ceilometer', 'volumes': ['/var/lib/openstack/telemetry:/var/lib/kolla/config_files/src:z', '/var/lib/kolla/config_files/ceilometer_agent_compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', 
'/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, container_name=ceilometer_agent_compute, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, config_id=ceilometer_agent_compute, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=b85d0548925081ae8c6bdd697658cec4)

○ 60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97-29274ca09f3589dd.service - /usr/bin/podman healthcheck run 60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97
     Loaded: loaded (/run/systemd/transient/60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97-29274ca09f3589dd.service; transient)
  Transient: yes
     Active: inactive (dead) since Fri 2026-01-30 20:21:02 UTC; 16s ago
   Duration: 146ms
TriggeredBy: ● 60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97-29274ca09f3589dd.timer
    Process: 264690 ExecStart=/usr/bin/podman healthcheck run 60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97 (code=exited, status=0/SUCCESS)
   Main PID: 264690 (code=exited, status=0/SUCCESS)
        CPU: 72ms

Jan 30 20:21:02 compute-0 podman[264690]: 2026-01-30 20:21:02.260625232 +0000 UTC m=+0.128990161 container health_status 60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97 (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, tcib_build_tag=b85d0548925081ae8c6bdd697658cec4, tcib_managed=true, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': 'fb0ba4ae2b01ec1faae83a80acf21c3ad3948d5c7b1e5820f87360e814103f4d-64c7807694e82e018314df3ffb373ed2487497944b057fc7720c3c54ea99ce8f-64c7807694e82e018314df3ffb373ed2487497944b057fc7720c3c54ea99ce8f-64c7807694e82e018314df3ffb373ed2487497944b057fc7720c3c54ea99ce8f'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, maintainer=OpenStack Kubernetes Operator team, container_name=ovn_controller, org.label-schema.build-date=20260127, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, config_id=ovn_controller, 
managed_by=edpm_ansible, org.label-schema.schema-version=1.0)

○ 8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a-765fa5d442117b50.service - /usr/bin/podman healthcheck run 8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a
     Loaded: loaded (/run/systemd/transient/8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a-765fa5d442117b50.service; transient)
  Transient: yes
     Active: inactive (dead) since Fri 2026-01-30 20:21:02 UTC; 17s ago
   Duration: 91ms
TriggeredBy: ● 8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a-765fa5d442117b50.timer
    Process: 264704 ExecStart=/usr/bin/podman healthcheck run 8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a (code=exited, status=0/SUCCESS)
   Main PID: 264704 (code=exited, status=0/SUCCESS)
        CPU: 79ms

Unit apparmor.service could not be found.
Unit apt-daily.service could not be found.
Unit auto-cpufreq.service could not be found.
Unit autofs.service could not be found.
Jan 30 20:21:02 compute-0 podman[264704]: 2026-01-30 20:21:02.206489501 +0000 UTC m=+0.075100158 container health_status 8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., build-date=2026-01-22T05:09:47Z, io.buildah.version=1.33.7, io.openshift.expose-services=, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, org.opencontainers.image.created=2026-01-22T05:09:47Z, url=https://catalog.redhat.com/en/search?searchType=containers, container_name=openstack_network_exporter, config_id=openstack_network_exporter, org.opencontainers.image.revision=812a20485e9d8d728e95b468c2886da21352b9fc, vcs-type=git, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., release=1769056855, vendor=Red Hat, Inc., managed_by=edpm_ansible, config_data={'command': [], 'environment': {'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml', 'OS_ENDPOINT_TYPE': 'internal', 'EDPM_CONFIG_HASH': '64c7807694e82e018314df3ffb373ed2487497944b057fc7720c3c54ea99ce8f-4513b9ade86adc87d1a6c9416d7c3bf860314bfcf0b3a2bcdbd881f6906fc595'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter', 'test': '/openstack/healthcheck openstack-netwo'}, 'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'net': 'host', 'ports': ['9105:9105'], 'privileged': True, 'recreate': True, 'restart': 'always', 'volumes': ['/var/lib/openstack/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, name=ubi9/ubi-minimal, vcs-ref=812a20485e9d8d728e95b468c2886da21352b9fc, distribution-scope=public, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, 
version=9.7, com.redhat.component=ubi9-minimal-container, maintainer=Red Hat, Inc., architecture=x86_64, cpe=cpe:/a:redhat:enterprise_linux:9::appstream, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.openshift.tags=minimal rhel9)

● auditd.service - Security Auditing Service
     Loaded: loaded (/usr/lib/systemd/system/auditd.service; enabled; preset: enabled)
     Active: active (running) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
       Docs: man:auditd(8)
             https://github.com/linux-audit/audit-documentation
   Main PID: 703 (auditd)
         IO: 0B read, 28.9M written
      Tasks: 4 (limit: 100092)
     Memory: 17.1M (peak: 17.6M)
        CPU: 4.614s
     CGroup: /system.slice/auditd.service
             ├─703 /sbin/auditd
             └─705 /usr/sbin/sedispatch

Jan 30 16:49:47 localhost augenrules[723]: failure 1
Jan 30 16:49:47 localhost augenrules[723]: pid 703
Jan 30 16:49:47 localhost augenrules[723]: rate_limit 0
Jan 30 16:49:47 localhost augenrules[723]: backlog_limit 8192
Jan 30 16:49:47 localhost augenrules[723]: lost 0
Jan 30 16:49:47 localhost augenrules[723]: backlog 4
Jan 30 16:49:47 localhost augenrules[723]: backlog_wait_time 60000
Jan 30 16:49:47 localhost augenrules[723]: backlog_wait_time_actual 0
Jan 30 16:49:47 localhost systemd[1]: Started Security Auditing Service.
Jan 30 17:44:16 compute-0 auditd[703]: Audit daemon rotating log files

○ auth-rpcgss-module.service - Kernel Module supporting RPCSEC_GSS
     Loaded: loaded (/usr/lib/systemd/system/auth-rpcgss-module.service; static)
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 16:49:46 UTC; 3h 31min ago

○ blk-availability.service - Availability of block devices
     Loaded: loaded (/usr/lib/systemd/system/blk-availability.service; disabled; preset: disabled)
     Active: inactive (dead)

○ ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735-302036fd2481bd88.service - /usr/bin/podman healthcheck run ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735
     Loaded: loaded (/run/systemd/transient/ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735-302036fd2481bd88.service; transient)
  Transient: yes
     Active: inactive (dead) since Fri 2026-01-30 20:21:15 UTC; 3s ago
   Duration: 75ms
TriggeredBy: ● ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735-302036fd2481bd88.timer
    Process: 267257 ExecStart=/usr/bin/podman healthcheck run ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735 (code=exited, status=0/SUCCESS)
   Main PID: 267257 (code=exited, status=0/SUCCESS)
        CPU: 67ms

Jan 30 20:21:15 compute-0 podman[267257]: 2026-01-30 20:21:15.373689858 +0000 UTC m=+0.063275428 container health_status ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735 (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'CONTAINER_HOST': 'unix:///run/podman/podman.sock', 'OS_ENDPOINT_TYPE': 'internal', 'EDPM_CONFIG_HASH': '64c7807694e82e018314df3ffb373ed2487497944b057fc7720c3c54ea99ce8f-4513b9ade86adc87d1a6c9416d7c3bf860314bfcf0b3a2bcdbd881f6906fc595'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/podman_exporter', 'test': '/openstack/healthcheck podman_exporter'}, 'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'net': 'host', 'ports': ['9882:9882'], 'privileged': True, 'recreate': True, 'restart': 'always', 'user': 'root', 'volumes': ['/var/lib/openstack/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=podman_exporter, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)

● chronyd.service - NTP client/server
     Loaded: loaded (/usr/lib/systemd/system/chronyd.service; enabled; preset: enabled)
     Active: active (running) since Fri 2026-01-30 17:30:55 UTC; 2h 50min ago
       Docs: man:chronyd(8)
             man:chrony.conf(5)
   Main PID: 64957 (chronyd)
         IO: 0B read, 8.0K written
      Tasks: 1 (limit: 100092)
     Memory: 1.0M (peak: 2.0M)
        CPU: 66ms
     CGroup: /system.slice/chronyd.service
             └─64957 /usr/sbin/chronyd -F 2

Jan 30 17:30:55 compute-0 systemd[1]: Starting NTP client/server...
Jan 30 17:30:55 compute-0 chronyd[64957]: chronyd version 4.8 starting (+CMDMON +REFCLOCK +RTC +PRIVDROP +SCFILTER +SIGND +NTS +SECHASH +IPV6 +DEBUG)
Jan 30 17:30:55 compute-0 chronyd[64957]: Frequency -31.447 +/- 0.145 ppm read from /var/lib/chrony/drift
Jan 30 17:30:55 compute-0 chronyd[64957]: Loaded seccomp filter (level 2)
Jan 30 17:30:55 compute-0 systemd[1]: Started NTP client/server.
Jan 30 17:33:05 compute-0 chronyd[64957]: Selected source 162.159.200.123 (pool.ntp.org)

● cloud-config.service - Cloud-init: Config Stage
     Loaded: loaded (/usr/lib/systemd/system/cloud-config.service; enabled; preset: disabled)
     Active: active (exited) since Fri 2026-01-30 16:49:51 UTC; 3h 31min ago
   Main PID: 1001 (code=exited, status=0/SUCCESS)
        CPU: 384ms

Jan 30 16:49:51 np0005602930.novalocal systemd[1]: Starting Cloud-init: Config Stage...
Jan 30 16:49:51 np0005602930.novalocal cloud-init[1147]: Cloud-init v. 24.4-8.el9 running 'modules:config' at Fri, 30 Jan 2026 16:49:51 +0000. Up 9.77 seconds.
Jan 30 16:49:51 np0005602930.novalocal systemd[1]: Finished Cloud-init: Config Stage.

● cloud-final.service - Cloud-init: Final Stage
     Loaded: loaded (/usr/lib/systemd/system/cloud-final.service; enabled; preset: disabled)
     Active: active (exited) since Fri 2026-01-30 16:49:51 UTC; 3h 31min ago
   Main PID: 1217 (code=exited, status=0/SUCCESS)
        CPU: 452ms

Jan 30 16:49:51 np0005602930.novalocal systemd[1]: Starting Cloud-init: Final Stage...
Jan 30 16:49:51 np0005602930.novalocal cloud-init[1339]: Cloud-init v. 24.4-8.el9 running 'modules:final' at Fri, 30 Jan 2026 16:49:51 +0000. Up 10.13 seconds.
Jan 30 16:49:51 np0005602930.novalocal cloud-init[1358]: #############################################################
Jan 30 16:49:51 np0005602930.novalocal cloud-init[1359]: -----BEGIN SSH HOST KEY FINGERPRINTS-----
Jan 30 16:49:51 np0005602930.novalocal cloud-init[1362]: 256 SHA256:gc2xHJYqKZrKU8QBcygnXPulLFQsCTJpRgG11LYmzTs root@np0005602930.novalocal (ECDSA)
Jan 30 16:49:51 np0005602930.novalocal cloud-init[1371]: 3072 SHA256:HAaH/Hqn/N4e0n/Wy+ft9Cs0MSokbIiZ4LkWp+jT3VI root@np0005602930.novalocal (RSA)
Jan 30 16:49:51 np0005602930.novalocal cloud-init[1372]: -----END SSH HOST KEY FINGERPRINTS-----
Jan 30 16:49:51 np0005602930.novalocal cloud-init[1339]: Cloud-init v. 24.4-8.el9 finished at Fri, 30 Jan 2026 16:49:51 +0000. Datasource DataSourceConfigDrive [net,ver=2][source=/dev/sr0].  Up 10.32 seconds
Jan 30 16:49:51 np0005602930.novalocal systemd[1]: Finished Cloud-init: Final Stage.

● cloud-init-local.service - Cloud-init: Local Stage (pre-network)
     Loaded: loaded (/usr/lib/systemd/system/cloud-init-local.service; enabled; preset: disabled)
     Active: active (exited) since Fri 2026-01-30 16:49:49 UTC; 3h 31min ago
   Main PID: 776 (code=exited, status=0/SUCCESS)
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 17.2M (peak: 50.4M)
        CPU: 739ms
     CGroup: /system.slice/cloud-init-local.service

Jan 30 16:49:48 localhost systemd[1]: Starting Cloud-init: Local Stage (pre-network)...
Jan 30 16:49:48 localhost cloud-init[839]: Cloud-init v. 24.4-8.el9 running 'init-local' at Fri, 30 Jan 2026 16:49:48 +0000. Up 7.18 seconds.
Jan 30 16:49:49 np0005602930.novalocal systemd[1]: Finished Cloud-init: Local Stage (pre-network).

● cloud-init.service - Cloud-init: Network Stage
     Loaded: loaded (/usr/lib/systemd/system/cloud-init.service; enabled; preset: disabled)
     Active: active (exited) since Fri 2026-01-30 16:49:51 UTC; 3h 31min ago
   Main PID: 886 (code=exited, status=0/SUCCESS)
        CPU: 974ms

Jan 30 16:49:50 np0005602930.novalocal cloud-init[920]: |       . ==O.B+  |
Jan 30 16:49:50 np0005602930.novalocal cloud-init[920]: |.   .   + Bo*oo. |
Jan 30 16:49:50 np0005602930.novalocal cloud-init[920]: | o   +   = +.....|
Jan 30 16:49:50 np0005602930.novalocal cloud-init[920]: |  o o + S   ..  o|
Jan 30 16:49:50 np0005602930.novalocal cloud-init[920]: | E o + o       . |
Jan 30 16:49:50 np0005602930.novalocal cloud-init[920]: |  . o +          |
Jan 30 16:49:50 np0005602930.novalocal cloud-init[920]: |     + o         |
Jan 30 16:49:50 np0005602930.novalocal cloud-init[920]: |      . .        |
Jan 30 16:49:50 np0005602930.novalocal cloud-init[920]: +----[SHA256]-----+
Jan 30 16:49:51 np0005602930.novalocal systemd[1]: Finished Cloud-init: Network Stage.

○ cpupower.service - Configure CPU power related settings
     Loaded: loaded (/usr/lib/systemd/system/cpupower.service; disabled; preset: disabled)
     Active: inactive (dead)

● crond.service - Command Scheduler
     Loaded: loaded (/usr/lib/systemd/system/crond.service; enabled; preset: enabled)
     Active: active (running) since Fri 2026-01-30 16:49:51 UTC; 3h 31min ago
   Main PID: 1008 (crond)
         IO: 36.0K read, 12.0K written
      Tasks: 1 (limit: 100092)
     Memory: 1.3M (peak: 4.9M)
        CPU: 254ms
     CGroup: /system.slice/crond.service
             └─1008 /usr/sbin/crond -n

Jan 30 17:52:01 compute-0 anacron[7493]: Normal exit (3 jobs run)
Jan 30 18:01:01 compute-0 CROND[217313]: (root) CMD (run-parts /etc/cron.hourly)
Jan 30 18:01:01 compute-0 run-parts[217316]: (/etc/cron.hourly) starting 0anacron
Jan 30 18:01:01 compute-0 CROND[217312]: (root) CMDEND (run-parts /etc/cron.hourly)
Unit display-manager.service could not be found.
Jan 30 19:01:01 compute-0 CROND[236019]: (root) CMD (run-parts /etc/cron.hourly)
Jan 30 19:01:01 compute-0 CROND[236018]: (root) CMDEND (run-parts /etc/cron.hourly)
Jan 30 20:01:01 compute-0 CROND[258542]: (root) CMD (run-parts /etc/cron.hourly)
Jan 30 20:01:01 compute-0 run-parts[258545]: (/etc/cron.hourly) starting 0anacron
Jan 30 20:01:01 compute-0 run-parts[258551]: (/etc/cron.hourly) finished 0anacron
Jan 30 20:01:01 compute-0 CROND[258541]: (root) CMDEND (run-parts /etc/cron.hourly)

● dbus-broker.service - D-Bus System Message Bus
     Loaded: loaded (/usr/lib/systemd/system/dbus-broker.service; enabled; preset: enabled)
     Active: active (running) since Fri 2026-01-30 16:49:48 UTC; 3h 31min ago
TriggeredBy: ● dbus.socket
       Docs: man:dbus-broker-launch(1)
   Main PID: 745 (dbus-broker-lau)
         IO: 0B read, 0B written
      Tasks: 2 (limit: 100092)
     Memory: 2.7M (peak: 3.5M)
        CPU: 4.787s
     CGroup: /system.slice/dbus-broker.service
             ├─745 /usr/bin/dbus-broker-launch --scope system --audit
             └─774 dbus-broker --log 4 --controller 9 --machine-id bf0bc0bb03de29b24cba1cc9599cf5d0 --max-bytes 536870912 --max-fds 4096 --max-matches 131072 --audit

Jan 30 17:26:17 compute-0 dbus-broker-launch[745]: Noticed file-system modification, trigger reload.
Jan 30 17:29:03 compute-0 dbus-broker-launch[774]: avc:  op=load_policy lsm=selinux seqno=7 res=1
Jan 30 17:29:13 compute-0 dbus-broker-launch[774]: avc:  op=load_policy lsm=selinux seqno=8 res=1
Jan 30 17:33:15 compute-0 dbus-broker-launch[774]: avc:  op=load_policy lsm=selinux seqno=9 res=1
Jan 30 17:36:33 compute-0 dbus-broker-launch[774]: avc:  op=load_policy lsm=selinux seqno=10 res=1
Jan 30 17:36:38 compute-0 dbus-broker-launch[774]: avc:  op=load_policy lsm=selinux seqno=11 res=1
Jan 30 17:37:35 compute-0 dbus-broker-launch[774]: avc:  op=load_policy lsm=selinux seqno=12 res=1
Jan 30 17:37:41 compute-0 dbus-broker-launch[745]: Noticed file-system modification, trigger reload.
Jan 30 17:37:41 compute-0 dbus-broker-launch[745]: Noticed file-system modification, trigger reload.
Jan 30 17:39:23 compute-0 dbus-broker-launch[774]: avc:  op=load_policy lsm=selinux seqno=13 res=1

○ dm-event.service - Device-mapper event daemon
     Loaded: loaded (/usr/lib/systemd/system/dm-event.service; static)
     Active: inactive (dead)
TriggeredBy: ● dm-event.socket
       Docs: man:dmeventd(8)

○ dnf-makecache.service - dnf makecache
     Loaded: loaded (/usr/lib/systemd/system/dnf-makecache.service; static)
     Active: inactive (dead) since Fri 2026-01-30 18:41:57 UTC; 1h 39min ago
TriggeredBy: ● dnf-makecache.timer
    Process: 230151 ExecStart=/usr/bin/dnf makecache --timer (code=exited, status=0/SUCCESS)
   Main PID: 230151 (code=exited, status=0/SUCCESS)
        CPU: 238ms

Jan 30 18:41:56 compute-0 systemd[1]: Starting dnf makecache...
Jan 30 18:41:57 compute-0 dnf[230151]: Metadata cache refreshed recently.
Jan 30 18:41:57 compute-0 systemd[1]: dnf-makecache.service: Deactivated successfully.
Jan 30 18:41:57 compute-0 systemd[1]: Finished dnf makecache.

○ dracut-cmdline.service - dracut cmdline hook
     Loaded: loaded (/usr/lib/systemd/system/dracut-cmdline.service; static)
     Active: inactive (dead) since Fri 2026-01-30 16:49:45 UTC; 3h 31min ago
   Duration: 1.611s
       Docs: man:dracut-cmdline.service(8)
             man:dracut.bootup(7)
   Main PID: 329 (code=exited, status=0/SUCCESS)
        CPU: 113ms

Jan 30 16:49:43 localhost systemd[1]: Starting dracut cmdline hook...
Jan 30 16:49:43 localhost dracut-cmdline[329]: dracut-9 dracut-057-102.git20250818.el9
Jan 30 16:49:43 localhost dracut-cmdline[329]: Using kernel command line parameters:    BOOT_IMAGE=(hd0,msdos1)/boot/vmlinuz-5.14.0-665.el9.x86_64 root=UUID=822f14ea-6e7e-41df-b0d8-fbe282d9ded8 ro console=ttyS0,115200n8 no_timer_check net.ifnames=0 crashkernel=1G-2G:192M,2G-64G:256M,64G-:512M
Jan 30 16:49:43 localhost systemd[1]: Finished dracut cmdline hook.
Jan 30 16:49:45 localhost systemd[1]: dracut-cmdline.service: Deactivated successfully.
Jan 30 16:49:45 localhost systemd[1]: Stopped dracut cmdline hook.

○ dracut-initqueue.service - dracut initqueue hook
     Loaded: loaded (/usr/lib/systemd/system/dracut-initqueue.service; static)
     Active: inactive (dead) since Fri 2026-01-30 16:49:45 UTC; 3h 31min ago
   Duration: 773ms
       Docs: man:dracut-initqueue.service(8)
             man:dracut.bootup(7)
   Main PID: 487 (code=exited, status=0/SUCCESS)
        CPU: 38ms

Jan 30 16:49:43 localhost systemd[1]: Starting dracut initqueue hook...
Jan 30 16:49:44 localhost systemd[1]: Finished dracut initqueue hook.
Jan 30 16:49:45 localhost systemd[1]: dracut-initqueue.service: Deactivated successfully.
Jan 30 16:49:45 localhost systemd[1]: Stopped dracut initqueue hook.

○ dracut-mount.service - dracut mount hook
     Loaded: loaded (/usr/lib/systemd/system/dracut-mount.service; static)
     Active: inactive (dead) since Fri 2026-01-30 16:49:45 UTC; 3h 31min ago
   Duration: 131ms
       Docs: man:dracut-mount.service(8)
             man:dracut.bootup(7)
   Main PID: 570 (code=exited, status=0/SUCCESS)
        CPU: 10ms

Jan 30 16:49:44 localhost systemd[1]: Starting dracut mount hook...
Jan 30 16:49:44 localhost systemd[1]: Finished dracut mount hook.
Jan 30 16:49:45 localhost systemd[1]: dracut-mount.service: Deactivated successfully.
Jan 30 16:49:45 localhost systemd[1]: Stopped dracut mount hook.

○ dracut-pre-mount.service - dracut pre-mount hook
     Loaded: loaded (/usr/lib/systemd/system/dracut-pre-mount.service; static)
     Active: inactive (dead) since Fri 2026-01-30 16:49:45 UTC; 3h 31min ago
   Duration: 738ms
       Docs: man:dracut-pre-mount.service(8)
             man:dracut.bootup(7)
   Main PID: 547 (code=exited, status=0/SUCCESS)
        CPU: 7ms

Jan 30 16:49:44 localhost systemd[1]: Starting dracut pre-mount hook...
Jan 30 16:49:44 localhost systemd[1]: Finished dracut pre-mount hook.
Jan 30 16:49:45 localhost systemd[1]: dracut-pre-mount.service: Deactivated successfully.
Jan 30 16:49:45 localhost systemd[1]: Stopped dracut pre-mount hook.

○ dracut-pre-pivot.service - dracut pre-pivot and cleanup hook
     Loaded: loaded (/usr/lib/systemd/system/dracut-pre-pivot.service; static)
     Active: inactive (dead) since Fri 2026-01-30 16:49:44 UTC; 3h 31min ago
   Duration: 46ms
       Docs: man:dracut-pre-pivot.service(8)
             man:dracut.bootup(7)
   Main PID: 575 (code=exited, status=0/SUCCESS)
        CPU: 64ms

Jan 30 16:49:44 localhost systemd[1]: Starting dracut pre-pivot and cleanup hook...
Jan 30 16:49:44 localhost systemd[1]: Finished dracut pre-pivot and cleanup hook.
Jan 30 16:49:44 localhost systemd[1]: dracut-pre-pivot.service: Deactivated successfully.
Jan 30 16:49:44 localhost systemd[1]: Stopped dracut pre-pivot and cleanup hook.

○ dracut-pre-trigger.service - dracut pre-trigger hook
     Loaded: loaded (/usr/lib/systemd/system/dracut-pre-trigger.service; static)
     Active: inactive (dead) since Fri 2026-01-30 16:49:45 UTC; 3h 31min ago
   Duration: 1.265s
       Docs: man:dracut-pre-trigger.service(8)
             man:dracut.bootup(7)
   Main PID: 470 (code=exited, status=0/SUCCESS)
        CPU: 19ms

Jan 30 16:49:43 localhost systemd[1]: Starting dracut pre-trigger hook...
Jan 30 16:49:43 localhost systemd[1]: Finished dracut pre-trigger hook.
Jan 30 16:49:45 localhost systemd[1]: dracut-pre-trigger.service: Deactivated successfully.
Jan 30 16:49:45 localhost systemd[1]: Stopped dracut pre-trigger hook.

○ dracut-pre-udev.service - dracut pre-udev hook
     Loaded: loaded (/usr/lib/systemd/system/dracut-pre-udev.service; static)
     Active: inactive (dead) since Fri 2026-01-30 16:49:45 UTC; 3h 31min ago
   Duration: 1.344s
       Docs: man:dracut-pre-udev.service(8)
             man:dracut.bootup(7)
   Main PID: 417 (code=exited, status=0/SUCCESS)
        CPU: 249ms

Jan 30 16:49:43 localhost systemd[1]: Starting dracut pre-udev hook...
Jan 30 16:49:43 localhost rpc.statd[446]: Version 2.5.4 starting
Jan 30 16:49:43 localhost rpc.statd[446]: Initializing NSM state
Jan 30 16:49:43 localhost rpc.idmapd[451]: Setting log level to 0
Jan 30 16:49:43 localhost systemd[1]: Finished dracut pre-udev hook.
Jan 30 16:49:44 localhost rpc.idmapd[451]: exiting on signal 15
Jan 30 16:49:45 localhost systemd[1]: dracut-pre-udev.service: Deactivated successfully.
Jan 30 16:49:45 localhost systemd[1]: Stopped dracut pre-udev hook.

○ dracut-shutdown-onfailure.service - Service executing upon dracut-shutdown failure to perform cleanup
     Loaded: loaded (/usr/lib/systemd/system/dracut-shutdown-onfailure.service; static)
     Active: inactive (dead)
       Docs: man:dracut-shutdown.service(8)

● dracut-shutdown.service - Restore /run/initramfs on shutdown
     Loaded: loaded (/usr/lib/systemd/system/dracut-shutdown.service; static)
     Active: active (exited) since Fri 2026-01-30 16:49:48 UTC; 3h 31min ago
       Docs: man:dracut-shutdown.service(8)
   Main PID: 777 (code=exited, status=0/SUCCESS)
        CPU: 2ms

Jan 30 16:49:48 localhost systemd[1]: Starting Restore /run/initramfs on shutdown...
Jan 30 16:49:48 localhost systemd[1]: Finished Restore /run/initramfs on shutdown.

○ e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0-5fafc72f56285aa6.service - /usr/bin/podman healthcheck run e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0
     Loaded: loaded (/run/systemd/transient/e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0-5fafc72f56285aa6.service; transient)
  Transient: yes
     Active: inactive (dead) since Fri 2026-01-30 20:20:57 UTC; 22s ago
   Duration: 98ms
TriggeredBy: ● e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0-5fafc72f56285aa6.timer
    Process: 264306 ExecStart=/usr/bin/podman healthcheck run e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0 (code=exited, status=0/SUCCESS)
   Main PID: 264306 (code=exited, status=0/SUCCESS)
        CPU: 110ms

Jan 30 20:20:57 compute-0 podman[264306]: 2026-01-30 20:20:57.019199767 +0000 UTC m=+0.078030488 container health_status e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'EDPM_CONFIG_HASH': '64c7807694e82e018314df3ffb373ed2487497944b057fc7720c3c54ea99ce8f-4513b9ade86adc87d1a6c9416d7c3bf860314bfcf0b3a2bcdbd881f6906fc595'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/node_exporter', 'test': '/openstack/healthcheck node_exporter'}, 'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'net': 'host', 'ports': ['9100:9100'], 'privileged': True, 'recreate': True, 'restart': 'always', 'user': 'root', 'volumes': ['/var/lib/openstack/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=node_exporter, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible)

● edpm-container-shutdown.service - EDPM Container Shutdown
     Loaded: loaded (/etc/systemd/system/edpm-container-shutdown.service; enabled; preset: enabled)
     Active: active (exited) since Fri 2026-01-30 17:31:13 UTC; 2h 50min ago
       Docs: https://github.com/openstack-k8s-operators/docs
   Main PID: 67451 (code=exited, status=0/SUCCESS)
        CPU: 2ms

Jan 30 17:31:13 compute-0 systemd[1]: Starting EDPM Container Shutdown...
Jan 30 17:31:13 compute-0 systemd[1]: Finished EDPM Container Shutdown.

● edpm_ceilometer_agent_compute.service - ceilometer_agent_compute container
     Loaded: loaded (/etc/systemd/system/edpm_ceilometer_agent_compute.service; enabled; preset: disabled)
     Active: active (running) since Fri 2026-01-30 17:43:22 UTC; 2h 37min ago
   Main PID: 192799 (conmon)
         IO: 0B read, 557.5K written
      Tasks: 1 (limit: 100092)
     Memory: 708.0K (peak: 17.7M)
        CPU: 719ms
     CGroup: /system.slice/edpm_ceilometer_agent_compute.service
             └─192799 /usr/bin/conmon --api-version 1 -c 60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5 -u 60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5/userdata -p /run/containers/storage/overlay-containers/60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5/userdata/pidfile -n ceilometer_agent_compute --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5 --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5/userdata/oci-log --conmon-pidfile /run/ceilometer_agent_compute.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup 
--exit-command-arg --stopped-only --exit-command-arg 60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5

Jan 30 20:19:23 compute-0 ceilometer_agent_compute[192799]: 2026-01-30 20:19:23.585 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Jan 30 20:19:23 compute-0 ceilometer_agent_compute[192799]: 2026-01-30 20:19:23.585 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Jan 30 20:19:23 compute-0 ceilometer_agent_compute[192799]: 2026-01-30 20:19:23.585 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Jan 30 20:19:23 compute-0 ceilometer_agent_compute[192799]: 2026-01-30 20:19:23.585 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Jan 30 20:19:23 compute-0 ceilometer_agent_compute[192799]: 2026-01-30 20:19:23.585 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Jan 30 20:19:23 compute-0 ceilometer_agent_compute[192799]: 2026-01-30 20:19:23.585 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Jan 30 20:19:23 compute-0 ceilometer_agent_compute[192799]: 2026-01-30 20:19:23.585 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Jan 30 20:19:23 compute-0 ceilometer_agent_compute[192799]: 2026-01-30 20:19:23.585 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Jan 30 20:19:23 compute-0 ceilometer_agent_compute[192799]: 2026-01-30 20:19:23.585 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Jan 30 20:19:23 compute-0 ceilometer_agent_compute[192799]: 2026-01-30 20:19:23.585 12 ERROR oslo_messaging.notify.messaging 

○ edpm_libvirt_guests.service - Suspend libvirt Guests in edpm
     Loaded: loaded (/etc/systemd/system/edpm_libvirt_guests.service; enabled; preset: disabled)
     Active: inactive (dead)
       Docs: man:libvirtd(8)
             https://libvirt.org

● edpm_node_exporter.service - node_exporter container
     Loaded: loaded (/etc/systemd/system/edpm_node_exporter.service; enabled; preset: disabled)
     Active: active (running) since Fri 2026-01-30 17:43:37 UTC; 2h 37min ago
   Main PID: 195796 (conmon)
         IO: 0B read, 132.0K written
      Tasks: 1 (limit: 100092)
     Memory: 688.0K (peak: 17.9M)
        CPU: 100ms
     CGroup: /system.slice/edpm_node_exporter.service
             └─195796 /usr/bin/conmon --api-version 1 -c e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0 -u e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0/userdata -p /run/containers/storage/overlay-containers/e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0/userdata/pidfile -n node_exporter --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0 --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0/userdata/oci-log --conmon-pidfile /run/node_exporter.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 
--stopped-only --exit-command-arg e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0

Jan 30 17:43:37 compute-0 node_exporter[195796]: ts=2026-01-30T17:43:37.016Z caller=node_exporter.go:117 level=info collector=tapestats
Jan 30 17:43:37 compute-0 node_exporter[195796]: ts=2026-01-30T17:43:37.016Z caller=node_exporter.go:117 level=info collector=udp_queues
Jan 30 17:43:37 compute-0 node_exporter[195796]: ts=2026-01-30T17:43:37.016Z caller=node_exporter.go:117 level=info collector=vmstat
Jan 30 17:43:37 compute-0 node_exporter[195796]: ts=2026-01-30T17:43:37.016Z caller=node_exporter.go:117 level=info collector=xfs
Jan 30 17:43:37 compute-0 node_exporter[195796]: ts=2026-01-30T17:43:37.016Z caller=node_exporter.go:117 level=info collector=zfs
Jan 30 17:43:37 compute-0 node_exporter[195796]: ts=2026-01-30T17:43:37.017Z caller=tls_config.go:232 level=info msg="Listening on" address=[::]:9100
Jan 30 17:43:37 compute-0 node_exporter[195796]: ts=2026-01-30T17:43:37.017Z caller=tls_config.go:268 level=info msg="TLS is enabled." http2=true address=[::]:9100
Jan 30 17:43:37 compute-0 podman[195780]: 2026-01-30 17:43:37.02723863 +0000 UTC m=+0.132314527 container start e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0 (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, managed_by=edpm_ansible, config_data={'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'EDPM_CONFIG_HASH': '64c7807694e82e018314df3ffb373ed2487497944b057fc7720c3c54ea99ce8f-4513b9ade86adc87d1a6c9416d7c3bf860314bfcf0b3a2bcdbd881f6906fc595'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/node_exporter', 'test': '/openstack/healthcheck node_exporter'}, 'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'net': 'host', 'ports': ['9100:9100'], 'privileged': True, 'recreate': True, 'restart': 'always', 'user': 'root', 'volumes': ['/var/lib/openstack/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=node_exporter, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>)
Jan 30 17:43:37 compute-0 podman[195780]: node_exporter
Jan 30 17:43:37 compute-0 systemd[1]: Started node_exporter container.

● edpm_nova_compute.service - nova_compute container
     Loaded: loaded (/etc/systemd/system/edpm_nova_compute.service; enabled; preset: disabled)
     Active: active (running) since Fri 2026-01-30 17:42:22 UTC; 2h 38min ago
   Main PID: 183115 (conmon)
         IO: 0B read, 90.5K written
      Tasks: 1 (limit: 100092)
     Memory: 688.0K (peak: 17.1M)
        CPU: 1.590s
     CGroup: /system.slice/edpm_nova_compute.service
             └─183115 /usr/bin/conmon --api-version 1 -c 925ff7e3b4039e4c5d83bde5f3c68c16c1a695e578352baba5cb792427c3386e -u 925ff7e3b4039e4c5d83bde5f3c68c16c1a695e578352baba5cb792427c3386e -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/925ff7e3b4039e4c5d83bde5f3c68c16c1a695e578352baba5cb792427c3386e/userdata -p /run/containers/storage/overlay-containers/925ff7e3b4039e4c5d83bde5f3c68c16c1a695e578352baba5cb792427c3386e/userdata/pidfile -n nova_compute --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/925ff7e3b4039e4c5d83bde5f3c68c16c1a695e578352baba5cb792427c3386e --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/925ff7e3b4039e4c5d83bde5f3c68c16c1a695e578352baba5cb792427c3386e/userdata/oci-log --conmon-pidfile /run/nova_compute.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 
--stopped-only --exit-command-arg 925ff7e3b4039e4c5d83bde5f3c68c16c1a695e578352baba5cb792427c3386e

Jan 30 20:21:02 compute-0 nova_compute[183115]: 2026-01-30 20:21:02.394 183119 DEBUG nova.compute.manager [None req-1257b01d-d285-41ed-a5cf-5c363b6ab87f - - - - - -] [instance: 69c40217-ae22-4704-ad01-f2ca06c42d58] Updated the network info_cache for instance _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9929[00m
Jan 30 20:21:02 compute-0 nova_compute[183115]: 2026-01-30 20:21:02.394 183119 DEBUG oslo_service.periodic_task [None req-1257b01d-d285-41ed-a5cf-5c363b6ab87f - - - - - -] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210[00m
Jan 30 20:21:03 compute-0 nova_compute[183115]: 2026-01-30 20:21:03.392 183119 DEBUG oslo_service.periodic_task [None req-1257b01d-d285-41ed-a5cf-5c363b6ab87f - - - - - -] Running periodic task ComputeManager._poll_unconfirmed_resizes run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210[00m
Jan 30 20:21:04 compute-0 nova_compute[183115]: 2026-01-30 20:21:04.709 183119 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263[00m
Jan 30 20:21:05 compute-0 nova_compute[183115]: 2026-01-30 20:21:05.953 183119 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263[00m
Jan 30 20:21:09 compute-0 nova_compute[183115]: 2026-01-30 20:21:09.710 183119 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263[00m
Jan 30 20:21:10 compute-0 nova_compute[183115]: 2026-01-30 20:21:10.954 183119 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263[00m
Jan 30 20:21:14 compute-0 nova_compute[183115]: 2026-01-30 20:21:14.714 183119 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263[00m
Jan 30 20:21:15 compute-0 nova_compute[183115]: 2026-01-30 20:21:15.957 183119 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263[00m
Jan 30 20:21:18 compute-0 nova_compute[183115]: 2026-01-30 20:21:18.343 183119 DEBUG oslo_service.periodic_task [None req-1257b01d-d285-41ed-a5cf-5c363b6ab87f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210[00m

● edpm_openstack_network_exporter.service - openstack_network_exporter container
     Loaded: loaded (/etc/systemd/system/edpm_openstack_network_exporter.service; enabled; preset: disabled)
     Active: active (running) since Fri 2026-01-30 17:44:14 UTC; 2h 37min ago
    Process: 202032 ExecStart=/usr/bin/podman start openstack_network_exporter (code=exited, status=0/SUCCESS)
   Main PID: 202048 (conmon)
         IO: 0B read, 122.5K written
      Tasks: 1 (limit: 100092)
     Memory: 684.0K (peak: 17.7M)
        CPU: 105ms
     CGroup: /system.slice/edpm_openstack_network_exporter.service
             └─202048 /usr/bin/conmon --api-version 1 -c 8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a -u 8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a/userdata -p /run/containers/storage/overlay-containers/8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a/userdata/pidfile -n openstack_network_exporter --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a/userdata/oci-log --conmon-pidfile /run/openstack_network_exporter.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup 
--exit-command-arg --stopped-only --exit-command-arg 8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a

Jan 30 17:44:14 compute-0 openstack_network_exporter[202048]: INFO    17:44:14 main.go:55: *ovnnorthd.Collector not registered, metric set not enabled
Jan 30 17:44:14 compute-0 openstack_network_exporter[202048]: INFO    17:44:14 main.go:48: registering *ovn.Collector
Jan 30 17:44:14 compute-0 openstack_network_exporter[202048]: INFO    17:44:14 main.go:55: *ovsdbserver.Collector not registered, metric set not enabled
Jan 30 17:44:14 compute-0 openstack_network_exporter[202048]: INFO    17:44:14 main.go:48: registering *pmd_perf.Collector
Jan 30 17:44:14 compute-0 openstack_network_exporter[202048]: INFO    17:44:14 main.go:48: registering *pmd_rxq.Collector
Jan 30 17:44:14 compute-0 openstack_network_exporter[202048]: INFO    17:44:14 main.go:48: registering *vswitch.Collector
Jan 30 17:44:14 compute-0 openstack_network_exporter[202048]: NOTICE  17:44:14 main.go:76: listening on https://:9105/metrics
Jan 30 17:44:14 compute-0 podman[202032]: 2026-01-30 17:44:14.550839973 +0000 UTC m=+0.442874218 container start 8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, config_data={'command': [], 'environment': {'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml', 'OS_ENDPOINT_TYPE': 'internal', 'EDPM_CONFIG_HASH': '64c7807694e82e018314df3ffb373ed2487497944b057fc7720c3c54ea99ce8f-4513b9ade86adc87d1a6c9416d7c3bf860314bfcf0b3a2bcdbd881f6906fc595'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter', 'test': '/openstack/healthcheck openstack-netwo'}, 'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'net': 'host', 'ports': ['9105:9105'], 'privileged': True, 'recreate': True, 'restart': 'always', 'volumes': ['/var/lib/openstack/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, url=https://catalog.redhat.com/en/search?searchType=containers, com.redhat.component=ubi9-minimal-container, distribution-scope=public, architecture=x86_64, build-date=2026-01-22T05:09:47Z, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. 
This image is maintained by Red Hat and updated regularly., io.openshift.tags=minimal rhel9, managed_by=edpm_ansible, org.opencontainers.image.created=2026-01-22T05:09:47Z, org.opencontainers.image.revision=812a20485e9d8d728e95b468c2886da21352b9fc, vcs-ref=812a20485e9d8d728e95b468c2886da21352b9fc, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., vendor=Red Hat, Inc., cpe=cpe:/a:redhat:enterprise_linux:9::appstream, io.buildah.version=1.33.7, release=1769056855, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., config_id=openstack_network_exporter, io.openshift.expose-services=, version=9.7, container_name=openstack_network_exporter, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, maintainer=Red Hat, Inc., name=ubi9/ubi-minimal, vcs-type=git)
Jan 30 17:44:14 compute-0 podman[202032]: openstack_network_exporter
Jan 30 17:44:14 compute-0 systemd[1]: Started openstack_network_exporter container.

● edpm_ovn_controller.service - ovn_controller container
     Loaded: loaded (/etc/systemd/system/edpm_ovn_controller.service; enabled; preset: disabled)
     Active: active (running) since Fri 2026-01-30 17:34:06 UTC; 2h 47min ago
   Main PID: 95416 (conmon)
         IO: 0B read, 122.0K written
      Tasks: 1 (limit: 100092)
     Memory: 688.0K (peak: 20.2M)
        CPU: 271ms
     CGroup: /system.slice/edpm_ovn_controller.service
             └─95416 /usr/bin/conmon --api-version 1 -c 60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97 -u 60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97/userdata -p /run/containers/storage/overlay-containers/60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97/userdata/pidfile -n ovn_controller --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97 --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97/userdata/oci-log --conmon-pidfile /run/ovn_controller.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 
--stopped-only --exit-command-arg 60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97

Jan 30 20:17:56 compute-0 ovn_controller[95416]: 2026-01-30T20:17:56Z|00730|pinctrl|WARN|Dropped 1263 log messages in last 58 seconds (most recently, 4 seconds ago) due to excessive rate
Jan 30 20:17:56 compute-0 ovn_controller[95416]: 2026-01-30T20:17:56Z|00731|pinctrl|WARN|IGMP Querier enabled without a valid IPv4 or IPv6 address
Jan 30 20:18:41 compute-0 ovn_controller[95416]: 2026-01-30T20:18:41Z|00732|memory_trim|INFO|Detected inactivity (last active 30002 ms ago): trimming memory
Jan 30 20:18:58 compute-0 ovn_controller[95416]: 2026-01-30T20:18:58Z|00733|pinctrl|WARN|Dropped 735 log messages in last 62 seconds (most recently, 17 seconds ago) due to excessive rate
Jan 30 20:18:58 compute-0 ovn_controller[95416]: 2026-01-30T20:18:58Z|00734|pinctrl|WARN|IGMP Querier enabled without a valid IPv4 or IPv6 address
Jan 30 20:19:56 compute-0 ovn_controller[95416]: 2026-01-30T20:19:56Z|00735|pinctrl|WARN|Dropped 475 log messages in last 57 seconds (most recently, 0 seconds ago) due to excessive rate
Jan 30 20:19:56 compute-0 ovn_controller[95416]: 2026-01-30T20:19:56Z|00736|pinctrl|WARN|IGMP Querier enabled without a valid IPv4 or IPv6 address
Jan 30 20:21:02 compute-0 ovn_controller[95416]: 2026-01-30T20:21:02Z|00737|pinctrl|WARN|Dropped 1157 log messages in last 66 seconds (most recently, 7 seconds ago) due to excessive rate
Jan 30 20:21:02 compute-0 ovn_controller[95416]: 2026-01-30T20:21:02Z|00738|pinctrl|WARN|IGMP Querier enabled without a valid IPv4 or IPv6 address
Jan 30 20:21:06 compute-0 ovn_controller[95416]: 2026-01-30T20:21:06Z|00739|memory_trim|INFO|Detected inactivity (last active 30005 ms ago): trimming memory

● edpm_ovn_metadata_agent.service - ovn_metadata_agent container
     Loaded: loaded (/etc/systemd/system/edpm_ovn_metadata_agent.service; enabled; preset: disabled)
     Active: active (running) since Fri 2026-01-30 17:35:02 UTC; 2h 46min ago
   Main PID: 104653 (conmon)
         IO: 0B read, 115.0K written
      Tasks: 1 (limit: 100092)
     Memory: 712.0K (peak: 18.1M)
        CPU: 369ms
     CGroup: /system.slice/edpm_ovn_metadata_agent.service
             └─104653 /usr/bin/conmon --api-version 1 -c 573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b -u 573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b/userdata -p /run/containers/storage/overlay-containers/573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b/userdata/pidfile -n ovn_metadata_agent --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b/userdata/oci-log --conmon-pidfile /run/ovn_metadata_agent.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 
--stopped-only --exit-command-arg 573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b

Jan 30 20:19:37 compute-0 ovn_metadata_agent[104653]: 2026-01-30 20:19:37.242 104658 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=6a98adb6-6cf2-49e6-9a5d-da14fa0a5a9a, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '86'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89[00m
Jan 30 20:20:04 compute-0 ovn_metadata_agent[104653]: 2026-01-30 20:20:04.249 104658 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404[00m
Jan 30 20:20:04 compute-0 ovn_metadata_agent[104653]: 2026-01-30 20:20:04.250 104658 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409[00m
Jan 30 20:20:04 compute-0 ovn_metadata_agent[104653]: 2026-01-30 20:20:04.250 104658 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423[00m
Jan 30 20:20:40 compute-0 ovn_metadata_agent[104653]: 2026-01-30 20:20:40.446 104658 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=87, options={'arp_ns_explicit_output': 'true', 'mac_prefix': '3a:a7:05', 'max_tunid': '16711680', 'northd_internal_version': '24.03.8-20.33.0-76.8', 'svc_monitor_mac': 'c2:dd:3d:53:c2:99'}, ipsec=False) old=SB_Global(nb_cfg=86) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43[00m
Jan 30 20:20:40 compute-0 ovn_metadata_agent[104653]: 2026-01-30 20:20:40.448 104658 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 8 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274[00m
Jan 30 20:20:48 compute-0 ovn_metadata_agent[104653]: 2026-01-30 20:20:48.450 104658 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=6a98adb6-6cf2-49e6-9a5d-da14fa0a5a9a, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '87'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89[00m
Jan 30 20:21:04 compute-0 ovn_metadata_agent[104653]: 2026-01-30 20:21:04.249 104658 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404[00m
Jan 30 20:21:04 compute-0 ovn_metadata_agent[104653]: 2026-01-30 20:21:04.251 104658 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.002s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409[00m
Jan 30 20:21:04 compute-0 ovn_metadata_agent[104653]: 2026-01-30 20:21:04.251 104658 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423[00m

● edpm_podman_exporter.service - podman_exporter container
     Loaded: loaded (/etc/systemd/system/edpm_podman_exporter.service; enabled; preset: disabled)
     Active: active (running) since Fri 2026-01-30 17:43:54 UTC; 2h 37min ago
   Main PID: 198925 (conmon)
         IO: 0B read, 130.5K written
      Tasks: 1 (limit: 100092)
     Memory: 692.0K (peak: 17.0M)
        CPU: 97ms
     CGroup: /system.slice/edpm_podman_exporter.service
             └─198925 /usr/bin/conmon --api-version 1 -c ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735 -u ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735/userdata -p /run/containers/storage/overlay-containers/ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735/userdata/pidfile -n podman_exporter --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735 --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735/userdata/oci-log --conmon-pidfile /run/podman_exporter.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 
--stopped-only --exit-command-arg ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735

Jan 30 17:43:54 compute-0 podman_exporter[198925]: ts=2026-01-30T17:43:54.503Z caller=exporter.go:68 level=info msg="Starting podman-prometheus-exporter" version="(version=1.10.1, branch=HEAD, revision=1)"
Jan 30 17:43:54 compute-0 podman_exporter[198925]: ts=2026-01-30T17:43:54.503Z caller=exporter.go:69 level=info msg=metrics enhanced=false
Jan 30 17:43:54 compute-0 podman_exporter[198925]: ts=2026-01-30T17:43:54.503Z caller=handler.go:94 level=info msg="enabled collectors"
Jan 30 17:43:54 compute-0 podman_exporter[198925]: ts=2026-01-30T17:43:54.503Z caller=handler.go:105 level=info collector=container
Unit fcoe.service could not be found.
Unit hv_kvp_daemon.service could not be found.
Jan 30 17:43:54 compute-0 podman[198910]: 2026-01-30 17:43:54.518511036 +0000 UTC m=+0.248072938 container start ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735 (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, config_id=podman_exporter, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible, config_data={'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'CONTAINER_HOST': 'unix:///run/podman/podman.sock', 'OS_ENDPOINT_TYPE': 'internal', 'EDPM_CONFIG_HASH': '64c7807694e82e018314df3ffb373ed2487497944b057fc7720c3c54ea99ce8f-4513b9ade86adc87d1a6c9416d7c3bf860314bfcf0b3a2bcdbd881f6906fc595'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/podman_exporter', 'test': '/openstack/healthcheck podman_exporter'}, 'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'net': 'host', 'ports': ['9882:9882'], 'privileged': True, 'recreate': True, 'restart': 'always', 'user': 'root', 'volumes': ['/var/lib/openstack/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']})
Jan 30 17:43:54 compute-0 podman[198910]: podman_exporter
Jan 30 17:43:54 compute-0 systemd[1]: Started podman_exporter container.
Jan 30 17:43:54 compute-0 podman_exporter[198925]: ts=2026-01-30T17:43:54.652Z caller=exporter.go:96 level=info msg="Listening on" address=:9882
Jan 30 17:43:54 compute-0 podman_exporter[198925]: ts=2026-01-30T17:43:54.653Z caller=tls_config.go:313 level=info msg="Listening on" address=[::]:9882
Jan 30 17:43:54 compute-0 podman_exporter[198925]: ts=2026-01-30T17:43:54.653Z caller=tls_config.go:349 level=info msg="TLS is enabled." http2=true address=[::]:9882

○ emergency.service - Emergency Shell
     Loaded: loaded (/usr/lib/systemd/system/emergency.service; static)
     Active: inactive (dead)
       Docs: man:sulogin(8)

● getty@tty1.service - Getty on tty1
     Loaded: loaded (/usr/lib/systemd/system/getty@.service; enabled; preset: enabled)
     Active: active (running) since Fri 2026-01-30 16:49:51 UTC; 3h 31min ago
       Docs: man:agetty(8)
             man:systemd-getty-generator(8)
             http://0pointer.de/blog/projects/serial-console.html
   Main PID: 1010 (agetty)
         IO: 0B read, 0B written
      Tasks: 1 (limit: 100092)
     Memory: 220.0K (peak: 440.0K)
        CPU: 5ms
     CGroup: /system.slice/system-getty.slice/getty@tty1.service
             └─1010 /sbin/agetty -o "-p -- \\u" --noclear - linux

Jan 30 16:49:51 np0005602930.novalocal systemd[1]: Started Getty on tty1.

● gssproxy.service - GSSAPI Proxy Daemon
     Loaded: loaded (/usr/lib/systemd/system/gssproxy.service; disabled; preset: disabled)
     Active: active (running) since Fri 2026-01-30 16:49:49 UTC; 3h 31min ago
   Main PID: 873 (gssproxy)
         IO: 0B read, 0B written
      Tasks: 6 (limit: 100092)
     Memory: 1.8M (peak: 3.4M)
        CPU: 24ms
     CGroup: /system.slice/gssproxy.service
             └─873 /usr/sbin/gssproxy -D

Jan 30 16:49:49 np0005602930.novalocal systemd[1]: Starting GSSAPI Proxy Daemon...
Jan 30 16:49:49 np0005602930.novalocal systemd[1]: Started GSSAPI Proxy Daemon.

○ import-state.service - Import network configuration from initramfs
     Loaded: loaded (/usr/lib/systemd/system/import-state.service; enabled; preset: enabled)
     Active: inactive (dead)

○ initrd-cleanup.service - Cleaning Up and Shutting Down Daemons
     Loaded: loaded (/usr/lib/systemd/system/initrd-cleanup.service; static)
     Active: inactive (dead) since Fri 2026-01-30 16:49:45 UTC; 3h 31min ago
   Main PID: 617 (code=exited, status=0/SUCCESS)
        CPU: 8ms

Jan 30 16:49:44 localhost systemd[1]: Starting Cleaning Up and Shutting Down Daemons...
Jan 30 16:49:45 localhost systemd[1]: initrd-cleanup.service: Deactivated successfully.
Jan 30 16:49:45 localhost systemd[1]: Finished Cleaning Up and Shutting Down Daemons.

○ initrd-parse-etc.service - Mountpoints Configured in the Real Root
     Loaded: loaded (/usr/lib/systemd/system/initrd-parse-etc.service; static)
     Active: inactive (dead) since Fri 2026-01-30 16:49:44 UTC; 3h 31min ago
   Main PID: 569 (code=exited, status=0/SUCCESS)
        CPU: 10ms

Jan 30 16:49:44 localhost systemd[1]: Starting Mountpoints Configured in the Real Root...
Jan 30 16:49:44 localhost systemd[1]: initrd-parse-etc.service: Deactivated successfully.
Jan 30 16:49:44 localhost systemd[1]: Finished Mountpoints Configured in the Real Root.

○ initrd-switch-root.service - Switch Root
     Loaded: loaded (/usr/lib/systemd/system/initrd-switch-root.service; static)
     Active: inactive (dead) since Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
   Main PID: 623 (code=exited, status=0/SUCCESS)
        CPU: 5ms

Jan 30 16:49:45 localhost systemd[1]: Starting Switch Root...

○ initrd-udevadm-cleanup-db.service - Cleanup udev Database
     Loaded: loaded (/usr/lib/systemd/system/initrd-udevadm-cleanup-db.service; static)
     Active: inactive (dead) since Fri 2026-01-30 16:49:45 UTC; 3h 31min ago
   Main PID: 621 (code=exited, status=0/SUCCESS)
        CPU: 6ms

Jan 30 16:49:45 localhost systemd[1]: Starting Cleanup udev Database...
Jan 30 16:49:45 localhost systemd[1]: initrd-udevadm-cleanup-db.service: Deactivated successfully.
Jan 30 16:49:45 localhost systemd[1]: Finished Cleanup udev Database.

○ ip6tables.service - IPv6 firewall with ip6tables
     Loaded: loaded (/usr/lib/systemd/system/ip6tables.service; disabled; preset: disabled)
     Active: inactive (dead)

○ iptables.service - IPv4 firewall with iptables
     Loaded: loaded (/usr/lib/systemd/system/iptables.service; disabled; preset: disabled)
     Active: inactive (dead) since Fri 2026-01-30 17:31:22 UTC; 2h 49min ago
   Duration: 41min 33.637s
   Main PID: 778 (code=exited, status=0/SUCCESS)
        CPU: 93ms

Jan 30 16:49:48 localhost systemd[1]: Starting IPv4 firewall with iptables...
Jan 30 16:49:48 localhost iptables.init[778]: iptables: Applying firewall rules: [  OK  ]
Jan 30 16:49:48 localhost systemd[1]: Finished IPv4 firewall with iptables.
Jan 30 17:31:21 compute-0 systemd[1]: Stopping IPv4 firewall with iptables...
Jan 30 17:31:22 compute-0 iptables.init[68702]: iptables: Setting chains to policy ACCEPT: raw mangle filter nat [  OK  ]
Jan 30 17:31:22 compute-0 iptables.init[68702]: iptables: Flushing firewall rules: [  OK  ]
Jan 30 17:31:22 compute-0 systemd[1]: iptables.service: Deactivated successfully.
Jan 30 17:31:22 compute-0 systemd[1]: Stopped IPv4 firewall with iptables.

● irqbalance.service - irqbalance daemon
     Loaded: loaded (/usr/lib/systemd/system/irqbalance.service; enabled; preset: enabled)
     Active: active (running) since Fri 2026-01-30 16:49:48 UTC; 3h 31min ago
       Docs: man:irqbalance(1)
             https://github.com/Irqbalance/irqbalance
   Main PID: 781 (irqbalance)
         IO: 0B read, 0B written
      Tasks: 2 (limit: 100092)
     Memory: 1.1M (peak: 1.5M)
        CPU: 728ms
     CGroup: /system.slice/irqbalance.service
             └─781 /usr/sbin/irqbalance

Jan 30 16:49:57 np0005602930.novalocal irqbalance[781]: Cannot change IRQ 32 affinity: Operation not permitted
Jan 30 16:49:57 np0005602930.novalocal irqbalance[781]: IRQ 32 affinity is now unmanaged
Jan 30 16:49:57 np0005602930.novalocal irqbalance[781]: Cannot change IRQ 30 affinity: Operation not permitted
Jan 30 16:49:57 np0005602930.novalocal irqbalance[781]: IRQ 30 affinity is now unmanaged
Jan 30 16:49:57 np0005602930.novalocal irqbalance[781]: Cannot change IRQ 29 affinity: Operation not permitted
Jan 30 16:49:57 np0005602930.novalocal irqbalance[781]: IRQ 29 affinity is now unmanaged
Jan 30 17:02:37 np0005602930.novalocal irqbalance[781]: Cannot change IRQ 27 affinity: Operation not permitted
Jan 30 17:02:37 np0005602930.novalocal irqbalance[781]: IRQ 27 affinity is now unmanaged
Jan 30 17:29:57 compute-0 irqbalance[781]: Cannot change IRQ 26 affinity: Operation not permitted
Jan 30 17:29:57 compute-0 irqbalance[781]: IRQ 26 affinity is now unmanaged

○ iscsi-init.service - One time configuration for iscsi.service
     Loaded: loaded (/usr/lib/systemd/system/iscsi-init.service; disabled; preset: disabled)
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 17:41:06 UTC; 2h 40min ago

Jan 30 17:40:32 compute-0 systemd[1]: One time configuration for iscsi.service was skipped because of an unmet condition check (ConditionPathExists=!/etc/iscsi/initiatorname.iscsi).
Jan 30 17:41:06 compute-0 systemd[1]: One time configuration for iscsi.service was skipped because of an unmet condition check (ConditionPathExists=!/etc/iscsi/initiatorname.iscsi).

○ iscsi-onboot.service - Special handling of early boot iSCSI sessions
     Loaded: loaded (/usr/lib/systemd/system/iscsi-onboot.service; enabled; preset: enabled)
     Active: inactive (dead)
       Docs: man:iscsiadm(8)
             man:iscsid(8)

● iscsi-shutdown.service - Logout off all iSCSI sessions on shutdown
     Loaded: loaded (/usr/lib/systemd/system/iscsi-shutdown.service; static)
     Active: active (exited) since Fri 2026-01-30 17:40:32 UTC; 2h 40min ago
       Docs: man:iscsid(8)
             man:iscsiadm(8)
   Main PID: 162298 (code=exited, status=0/SUCCESS)
        CPU: 3ms

Jan 30 17:40:32 compute-0 systemd[1]: Starting Logout off all iSCSI sessions on shutdown...
Jan 30 17:40:32 compute-0 systemd[1]: Finished Logout off all iSCSI sessions on shutdown.

○ iscsi-starter.service
     Loaded: loaded (/usr/lib/systemd/system/iscsi-starter.service; enabled; preset: disabled)
     Active: inactive (dead)

○ iscsi.service - Login and scanning of iSCSI devices
     Loaded: loaded (/usr/lib/systemd/system/iscsi.service; indirect; preset: enabled)
     Active: inactive (dead)
       Docs: man:iscsiadm(8)
             man:iscsid(8)

● iscsid.service - Open-iSCSI
     Loaded: loaded (/usr/lib/systemd/system/iscsid.service; enabled; preset: disabled)
     Active: active (running) since Fri 2026-01-30 17:41:06 UTC; 2h 40min ago
TriggeredBy: ● iscsid.socket
       Docs: man:iscsid(8)
             man:iscsiuio(8)
             man:iscsiadm(8)
   Main PID: 168784 (iscsid)
     Status: "Ready to process requests"
         IO: 0B read, 0B written
      Tasks: 1 (limit: 100092)
     Memory: 1.9M (peak: 2.0M)
        CPU: 5ms
     CGroup: /system.slice/iscsid.service
             └─168784 /usr/sbin/iscsid -f

Jan 30 17:41:06 compute-0 systemd[1]: Starting Open-iSCSI...
Jan 30 17:41:06 compute-0 systemd[1]: Started Open-iSCSI.

○ iscsiuio.service - iSCSI UserSpace I/O driver
     Loaded: loaded (/usr/lib/systemd/system/iscsiuio.service; disabled; preset: disabled)
     Active: inactive (dead)
TriggeredBy: ○ iscsiuio.socket
       Docs: man:iscsiuio(8)

● kdump.service - Crash recovery kernel arming
     Loaded: loaded (/usr/lib/systemd/system/kdump.service; enabled; preset: enabled)
     Active: active (exited) since Fri 2026-01-30 16:49:58 UTC; 3h 31min ago
   Main PID: 1012 (code=exited, status=0/SUCCESS)
        CPU: 13.834s

Jan 30 16:49:57 np0005602930.novalocal dracut[1268]: Linked:         0 files
Jan 30 16:49:57 np0005602930.novalocal dracut[1268]: Compared:       0 xattrs
Jan 30 16:49:57 np0005602930.novalocal dracut[1268]: Compared:       0 files
Jan 30 16:49:57 np0005602930.novalocal dracut[1268]: Saved:          0 B
Jan 30 16:49:57 np0005602930.novalocal dracut[1268]: Duration:       0.000312 seconds
Jan 30 16:49:57 np0005602930.novalocal dracut[1268]: *** Hardlinking files done ***
Jan 30 16:49:58 np0005602930.novalocal dracut[1268]: *** Creating initramfs image file '/boot/initramfs-5.14.0-665.el9.x86_64kdump.img' done ***
Jan 30 16:49:58 np0005602930.novalocal kdumpctl[1019]: kdump: kexec: loaded kdump kernel
Jan 30 16:49:58 np0005602930.novalocal kdumpctl[1019]: kdump: Starting kdump: [OK]
Jan 30 16:49:58 np0005602930.novalocal systemd[1]: Finished Crash recovery kernel arming.

● kmod-static-nodes.service - Create List of Static Device Nodes
     Loaded: loaded (/usr/lib/systemd/system/kmod-static-nodes.service; static)
     Active: active (exited) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
   Main PID: 673 (code=exited, status=0/SUCCESS)
        CPU: 2ms

● ldconfig.service - Rebuild Dynamic Linker Cache
     Loaded: loaded (/usr/lib/systemd/system/ldconfig.service; static)
     Active: active (exited) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
       Docs: man:ldconfig(8)
   Main PID: 696 (code=exited, status=0/SUCCESS)
        CPU: 40ms

Jan 30 16:49:47 localhost systemd[1]: Starting Rebuild Dynamic Linker Cache...
Jan 30 16:49:47 localhost systemd[1]: Finished Rebuild Dynamic Linker Cache.

○ libvirtd.service
     Loaded: masked (Reason: Unit libvirtd.service is masked.)
     Active: inactive (dead)
TriggeredBy: ○ libvirtd.socket
             ○ libvirtd-ro.socket
             ○ libvirtd-admin.socket

○ loadmodules.service - Load legacy module configuration
     Loaded: loaded (/usr/lib/systemd/system/loadmodules.service; enabled; preset: enabled)
     Active: inactive (dead)

○ logrotate.service - Rotate log files
     Loaded: loaded (/usr/lib/systemd/system/logrotate.service; static)
Unit lvm2-activation-early.service could not be found.
     Active: inactive (dead)
TriggeredBy: ● logrotate.timer
       Docs: man:logrotate(8)
             man:logrotate.conf(5)

○ lvm2-lvmpolld.service - LVM2 poll daemon
     Loaded: loaded (/usr/lib/systemd/system/lvm2-lvmpolld.service; static)
     Active: inactive (dead)
TriggeredBy: ● lvm2-lvmpolld.socket
       Docs: man:lvmpolld(8)

● lvm2-monitor.service - Monitoring of LVM2 mirrors, snapshots etc. using dmeventd or progress polling
     Loaded: loaded (/usr/lib/systemd/system/lvm2-monitor.service; enabled; preset: enabled)
     Active: active (exited) since Fri 2026-01-30 17:24:18 UTC; 2h 57min ago
       Docs: man:dmeventd(8)
             man:lvcreate(8)
             man:lvchange(8)
             man:vgchange(8)
   Main PID: 34254 (code=exited, status=0/SUCCESS)
        CPU: 29ms

Jan 30 17:24:18 compute-0 systemd[1]: Starting Monitoring of LVM2 mirrors, snapshots etc. using dmeventd or progress polling...
Jan 30 17:24:18 compute-0 systemd[1]: Finished Monitoring of LVM2 mirrors, snapshots etc. using dmeventd or progress polling.

○ mdmonitor.service - Software RAID monitoring and management
     Loaded: loaded (/usr/lib/systemd/system/mdmonitor.service; enabled; preset: enabled)
     Active: inactive (dead)

○ microcode.service - Load CPU microcode update
     Loaded: loaded (/usr/lib/systemd/system/microcode.service; enabled; preset: enabled)
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 16:49:48 UTC; 3h 31min ago

Jan 30 16:49:48 localhost systemd[1]: Load CPU microcode update was skipped because of an unmet condition check (ConditionPathExists=/sys/devices/system/cpu/microcode/reload).

○ modprobe@configfs.service - Load Kernel Module configfs
     Loaded: loaded (/usr/lib/systemd/system/modprobe@.service; static)
     Active: inactive (dead) since Fri 2026-01-30 16:49:48 UTC; 3h 31min ago
       Docs: man:modprobe(8)
   Main PID: 769 (code=exited, status=0/SUCCESS)
        CPU: 3ms

Jan 30 16:49:48 localhost systemd[1]: Starting Load Kernel Module configfs...
Jan 30 16:49:48 localhost systemd[1]: modprobe@configfs.service: Deactivated successfully.
Jan 30 16:49:48 localhost systemd[1]: Finished Load Kernel Module configfs.

○ modprobe@drm.service - Load Kernel Module drm
     Loaded: loaded (/usr/lib/systemd/system/modprobe@.service; static)
     Active: inactive (dead) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
       Docs: man:modprobe(8)
   Main PID: 675 (code=exited, status=0/SUCCESS)
        CPU: 94ms

Jan 30 16:49:47 localhost systemd[1]: modprobe@drm.service: Deactivated successfully.
Jan 30 16:49:47 localhost systemd[1]: Finished Load Kernel Module drm.

○ modprobe@efi_pstore.service - Load Kernel Module efi_pstore
     Loaded: loaded (/usr/lib/systemd/system/modprobe@.service; static)
     Active: inactive (dead) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
       Docs: man:modprobe(8)
   Main PID: 676 (code=exited, status=0/SUCCESS)
        CPU: 1ms

○ modprobe@fuse.service - Load Kernel Module fuse
     Loaded: loaded (/usr/lib/systemd/system/modprobe@.service; static)
     Active: inactive (dead) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
       Docs: man:modprobe(8)
   Main PID: 677 (code=exited, status=0/SUCCESS)
        CPU: 46ms

Jan 30 16:49:47 localhost systemd[1]: modprobe@fuse.service: Deactivated successfully.
Jan 30 16:49:47 localhost systemd[1]: Finished Load Kernel Module fuse.

● multipathd.service - Device-Mapper Multipath Device Controller
     Loaded: loaded (/usr/lib/systemd/system/multipathd.service; enabled; preset: enabled)
     Active: active (running) since Fri 2026-01-30 17:41:07 UTC; 2h 40min ago
TriggeredBy: ● multipathd.socket
   Main PID: 168942 (multipathd)
     Status: "up"
         IO: 0B read, 0B written
      Tasks: 7
     Memory: 18.4M (peak: 19.0M)
        CPU: 860ms
     CGroup: /system.slice/multipathd.service
             └─168942 /sbin/multipathd -d -s

Jan 30 17:41:07 compute-0 systemd[1]: Starting Device-Mapper Multipath Device Controller...
Jan 30 17:41:07 compute-0 multipathd[168942]: --------start up--------
Jan 30 17:41:07 compute-0 multipathd[168942]: read /etc/multipath.conf
Jan 30 17:41:07 compute-0 multipathd[168942]: path checkers start up
Jan 30 17:41:07 compute-0 systemd[1]: Started Device-Mapper Multipath Device Controller.

○ netns-placeholder.service - Create netns directory
     Loaded: loaded (/etc/systemd/system/netns-placeholder.service; enabled; preset: enabled)
     Active: inactive (dead) since Fri 2026-01-30 17:34:47 UTC; 2h 46min ago
   Main PID: 101922 (code=exited, status=0/SUCCESS)
        CPU: 9ms

Jan 30 17:34:47 compute-0 systemd[1]: Starting Create netns directory...
Jan 30 17:34:47 compute-0 systemd[1]: netns-placeholder.service: Deactivated successfully.
Jan 30 17:34:47 compute-0 systemd[1]: Finished Create netns directory.

○ network.service - LSB: Bring up/down networking
     Loaded: loaded (/etc/rc.d/init.d/network; generated)
     Active: inactive (dead)
       Docs: man:systemd-sysv-generator(8)

● NetworkManager-wait-online.service - Network Manager Wait Online
     Loaded: loaded (/usr/lib/systemd/system/NetworkManager-wait-online.service; enabled; preset: disabled)
     Active: active (exited) since Fri 2026-01-30 17:29:21 UTC; 2h 51min ago
       Docs: man:NetworkManager-wait-online.service(8)
   Main PID: 55531 (code=exited, status=0/SUCCESS)
        CPU: 21ms

Jan 30 17:29:21 compute-0 systemd[1]: Starting Network Manager Wait Online...
Jan 30 17:29:21 compute-0 systemd[1]: Finished Network Manager Wait Online.

● NetworkManager.service - Network Manager
     Loaded: loaded (/usr/lib/systemd/system/NetworkManager.service; enabled; preset: enabled)
    Drop-In: /usr/lib/systemd/system/NetworkManager.service.d
             └─NetworkManager-ovs.conf
     Active: active (running) since Fri 2026-01-30 17:29:21 UTC; 2h 51min ago
       Docs: man:NetworkManager(8)
   Main PID: 55516 (NetworkManager)
         IO: 104.0K read, 250.0K written
      Tasks: 3 (limit: 100092)
     Memory: 5.4M (peak: 6.5M)
        CPU: 1min 15.504s
     CGroup: /system.slice/NetworkManager.service
             └─55516 /usr/sbin/NetworkManager --no-daemon

Jan 30 19:56:31 compute-0 NetworkManager[55516]: <info>  [1769802991.9417] manager: (tap8707e31d-f0): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/178)
Jan 30 20:05:06 compute-0 NetworkManager[55516]: <info>  [1769803506.1212] device (tap95c42cc4-0e): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Jan 30 20:07:05 compute-0 NetworkManager[55516]: <info>  [1769803625.5663] manager: (tap4a21055e-d9): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/179)
Jan 30 20:07:06 compute-0 NetworkManager[55516]: <info>  [1769803626.5681] manager: (tap4a21055e-d9): new Tun device (/org/freedesktop/NetworkManager/Devices/180)
Jan 30 20:07:06 compute-0 NetworkManager[55516]: <info>  [1769803626.6376] device (tap4a21055e-d9): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Jan 30 20:07:06 compute-0 NetworkManager[55516]: <info>  [1769803626.6383] device (tap4a21055e-d9): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Jan 30 20:07:06 compute-0 NetworkManager[55516]: <info>  [1769803626.7143] manager: (tap68e01440-60): new Veth device (/org/freedesktop/NetworkManager/Devices/181)
Jan 30 20:07:06 compute-0 NetworkManager[55516]: <info>  [1769803626.7624] device (tap68e01440-60): carrier: link connected
Jan 30 20:07:06 compute-0 NetworkManager[55516]: <info>  [1769803626.8665] manager: (tap68e01440-60): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/182)
Jan 30 20:15:29 compute-0 NetworkManager[55516]: <info>  [1769804129.7508] device (tap4a21055e-d9): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')

○ nfs-idmapd.service - NFSv4 ID-name mapping service
     Loaded: loaded (/usr/lib/systemd/system/nfs-idmapd.service; static)
     Active: inactive (dead)
       Docs: man:idmapd(8)

○ nfs-mountd.service - NFS Mount Daemon
Unit ntpd.service could not be found.
Unit ntpdate.service could not be found.
     Loaded: loaded (/usr/lib/systemd/system/nfs-mountd.service; static)
     Active: inactive (dead)
       Docs: man:rpc.mountd(8)

○ nfs-server.service - NFS server and services
     Loaded: loaded (/usr/lib/systemd/system/nfs-server.service; disabled; preset: disabled)
     Active: inactive (dead)
       Docs: man:rpc.nfsd(8)
             man:exportfs(8)

○ nfs-utils.service - NFS server and client services
     Loaded: loaded (/usr/lib/systemd/system/nfs-utils.service; static)
     Active: inactive (dead)

○ nfsdcld.service - NFSv4 Client Tracking Daemon
     Loaded: loaded (/usr/lib/systemd/system/nfsdcld.service; static)
     Active: inactive (dead)
       Docs: man:nfsdcld(8)

● nftables.service - Netfilter Tables
     Loaded: loaded (/usr/lib/systemd/system/nftables.service; enabled; preset: disabled)
     Active: active (exited) since Fri 2026-01-30 17:31:24 UTC; 2h 49min ago
       Docs: man:nft(8)
   Main PID: 69095 (code=exited, status=0/SUCCESS)
        CPU: 12ms

Jan 30 17:31:24 compute-0 systemd[1]: Starting Netfilter Tables...
Jan 30 17:31:24 compute-0 systemd[1]: Finished Netfilter Tables.

● nis-domainname.service - Read and set NIS domainname from /etc/sysconfig/network
     Loaded: loaded (/usr/lib/systemd/system/nis-domainname.service; enabled; preset: enabled)
     Active: active (exited) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
   Main PID: 678 (code=exited, status=0/SUCCESS)
        CPU: 2ms

Jan 30 16:49:47 localhost systemd[1]: Finished Read and set NIS domainname from /etc/sysconfig/network.

○ nvmefc-boot-connections.service - Auto-connect to subsystems on FC-NVME devices found during boot
     Loaded: loaded (/usr/lib/systemd/system/nvmefc-boot-connections.service; enabled; preset: enabled)
     Active: inactive (dead)

● openvswitch.service - Open vSwitch
     Loaded: loaded (/usr/lib/systemd/system/openvswitch.service; enabled; preset: disabled)
     Active: active (exited) since Fri 2026-01-30 17:29:08 UTC; 2h 52min ago
   Main PID: 53824 (code=exited, status=0/SUCCESS)
        CPU: 2ms

Jan 30 17:29:08 compute-0 systemd[1]: Starting Open vSwitch...
Jan 30 17:29:08 compute-0 systemd[1]: Finished Open vSwitch.

● ovs-delete-transient-ports.service - Open vSwitch Delete Transient Ports
     Loaded: loaded (/usr/lib/systemd/system/ovs-delete-transient-ports.service; static)
     Active: active (exited) since Fri 2026-01-30 17:29:08 UTC; 2h 52min ago
   Main PID: 53760 (code=exited, status=0/SUCCESS)
        CPU: 31ms

Jan 30 17:29:08 compute-0 systemd[1]: Starting Open vSwitch Delete Transient Ports...
Jan 30 17:29:08 compute-0 systemd[1]: Finished Open vSwitch Delete Transient Ports.

● ovs-vswitchd.service - Open vSwitch Forwarding Unit
     Loaded: loaded (/usr/lib/systemd/system/ovs-vswitchd.service; static)
     Active: active (running) since Fri 2026-01-30 17:29:08 UTC; 2h 52min ago
   Main PID: 53815 (ovs-vswitchd)
         IO: 3.4M read, 828.0K written
      Tasks: 13 (limit: 100092)
     Memory: 251.3M (peak: 253.1M)
        CPU: 33.864s
     CGroup: /system.slice/ovs-vswitchd.service
             └─53815 ovs-vswitchd unix:/var/run/openvswitch/db.sock -vconsole:emer -vsyslog:err -vfile:info --mlockall --user openvswitch:hugetlbfs --no-chdir --log-file=/var/log/openvswitch/ovs-vswitchd.log --pidfile=/var/run/openvswitch/ovs-vswitchd.pid --detach

Jan 30 17:29:08 compute-0 systemd[1]: Starting Open vSwitch Forwarding Unit...
Jan 30 17:29:08 compute-0 ovs-ctl[53804]: Inserting openvswitch module [  OK  ]
Jan 30 17:29:08 compute-0 ovs-ctl[53773]: Starting ovs-vswitchd [  OK  ]
Jan 30 17:29:08 compute-0 ovs-ctl[53773]: Enabling remote OVSDB managers [  OK  ]
Jan 30 17:29:08 compute-0 ovs-vsctl[53823]: ovs|00001|vsctl|INFO|Called as ovs-vsctl --no-wait add Open_vSwitch . external-ids hostname=compute-0
Jan 30 17:29:08 compute-0 systemd[1]: Started Open vSwitch Forwarding Unit.

● ovsdb-server.service - Open vSwitch Database Unit
     Loaded: loaded (/usr/lib/systemd/system/ovsdb-server.service; static)
Unit pacemaker.service could not be found.
Unit plymouth-quit-wait.service could not be found.
Unit plymouth-start.service could not be found.
     Active: active (running) since Fri 2026-01-30 17:29:08 UTC; 2h 52min ago
   Main PID: 53733 (ovsdb-server)
         IO: 1.2M read, 1.1M written
      Tasks: 1 (limit: 100092)
     Memory: 5.0M (peak: 41.2M)
        CPU: 47.872s
     CGroup: /system.slice/ovsdb-server.service
             └─53733 ovsdb-server /etc/openvswitch/conf.db -vconsole:emer -vsyslog:err -vfile:info --remote=punix:/var/run/openvswitch/db.sock --private-key=db:Open_vSwitch,SSL,private_key --certificate=db:Open_vSwitch,SSL,certificate --bootstrap-ca-cert=db:Open_vSwitch,SSL,ca_cert --user openvswitch:hugetlbfs --no-chdir --log-file=/var/log/openvswitch/ovsdb-server.log --pidfile=/var/run/openvswitch/ovsdb-server.pid --detach

Jan 30 17:29:08 compute-0 chown[53680]: /usr/bin/chown: cannot access '/run/openvswitch': No such file or directory
Jan 30 17:29:08 compute-0 ovs-ctl[53685]: /etc/openvswitch/conf.db does not exist ... (warning).
Jan 30 17:29:08 compute-0 ovs-ctl[53685]: Creating empty database /etc/openvswitch/conf.db [  OK  ]
Jan 30 17:29:08 compute-0 ovs-ctl[53685]: Starting ovsdb-server [  OK  ]
Jan 30 17:29:08 compute-0 ovs-vsctl[53734]: ovs|00001|vsctl|INFO|Called as ovs-vsctl --no-wait -- init -- set Open_vSwitch . db-version=8.5.1
Jan 30 17:29:08 compute-0 ovs-vsctl[53750]: ovs|00001|vsctl|INFO|Called as ovs-vsctl --no-wait set Open_vSwitch . ovs-version=3.3.5-115.el9s "external-ids:system-id=\"6a98adb6-6cf2-49e6-9a5d-da14fa0a5a9a\"" "external-ids:rundir=\"/var/run/openvswitch\"" "system-type=\"centos\"" "system-version=\"9\""
Jan 30 17:29:08 compute-0 ovs-ctl[53685]: Configuring Open vSwitch system IDs [  OK  ]
Jan 30 17:29:08 compute-0 ovs-ctl[53685]: Enabling remote OVSDB managers [  OK  ]
Jan 30 17:29:08 compute-0 ovs-vsctl[53759]: ovs|00001|vsctl|INFO|Called as ovs-vsctl --no-wait add Open_vSwitch . external-ids hostname=compute-0
Jan 30 17:29:08 compute-0 systemd[1]: Started Open vSwitch Database Unit.

● podman.service - Podman API Service
     Loaded: loaded (/usr/lib/systemd/system/podman.service; disabled; preset: disabled)
     Active: active (running) since Fri 2026-01-30 17:43:54 UTC; 2h 37min ago
TriggeredBy: ● podman.socket
       Docs: man:podman-system-service(1)
   Main PID: 198936 (podman)
         IO: 0B read, 0B written
      Tasks: 15 (limit: 100092)
     Memory: 21.6M (peak: 23.6M)
        CPU: 16.730s
     CGroup: /system.slice/podman.service
             └─198936 /usr/bin/podman --log-level=info system service

Jan 30 17:43:54 compute-0 podman[198936]: time="2026-01-30T17:43:54Z" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled"
Jan 30 17:43:54 compute-0 podman[198936]: time="2026-01-30T17:43:54Z" level=info msg="Using systemd socket activation to determine API endpoint"
Jan 30 17:43:54 compute-0 podman[198936]: time="2026-01-30T17:43:54Z" level=info msg="API service listening on \"/run/podman/podman.sock\". URI: \"unix:///run/podman/podman.sock\""
Jan 30 17:43:54 compute-0 podman[198936]: @ - - [30/Jan/2026:17:43:54 +0000] "GET /v4.9.3/libpod/_ping HTTP/1.1" 200 2 "" "Go-http-client/1.1"
Jan 30 17:43:54 compute-0 podman[198936]: time="2026-01-30T17:43:54Z" level=info msg="List containers: received `last` parameter - overwriting `limit`"
Jan 30 17:43:54 compute-0 podman[198936]: @ - - [30/Jan/2026:17:43:54 +0000] "GET /v4.9.3/libpod/containers/json?all=true&external=false&last=0&namespace=false&size=true&sync=false HTTP/1.1" 200 18074 "" "Go-http-client/1.1"
Jan 30 18:43:54 compute-0 podman[198936]: time="2026-01-30T18:43:54Z" level=info msg="List containers: received `last` parameter - overwriting `limit`"
Jan 30 18:43:54 compute-0 podman[198936]: @ - - [30/Jan/2026:18:43:54 +0000] "GET /v4.9.3/libpod/containers/json?all=true&external=false&last=0&namespace=false&size=true&sync=false HTTP/1.1" 200 22980 "" "Go-http-client/1.1"
Jan 30 19:43:54 compute-0 podman[198936]: time="2026-01-30T19:43:54Z" level=info msg="List containers: received `last` parameter - overwriting `limit`"
Jan 30 19:43:54 compute-0 podman[198936]: @ - - [30/Jan/2026:19:43:54 +0000] "GET /v4.9.3/libpod/containers/json?all=true&external=false&last=0&namespace=false&size=true&sync=false HTTP/1.1" 200 22980 "" "Go-http-client/1.1"
Unit power-profiles-daemon.service could not be found.
Unit rpc-svcgssd.service could not be found.

● polkit.service - Authorization Manager
     Loaded: loaded (/usr/lib/systemd/system/polkit.service; static)
     Active: active (running) since Fri 2026-01-30 17:26:22 UTC; 2h 54min ago
       Docs: man:polkit(8)
   Main PID: 43692 (polkitd)
         IO: 18.6M read, 0B written
      Tasks: 12 (limit: 100092)
     Memory: 24.4M (peak: 26.8M)
        CPU: 949ms
     CGroup: /system.slice/polkit.service
             └─43692 /usr/lib/polkit-1/polkitd --no-debug

Jan 30 17:37:55 compute-0 polkitd[43692]: Reloading rules
Jan 30 17:37:55 compute-0 polkitd[43692]: Collecting garbage unconditionally...
Jan 30 17:37:55 compute-0 polkitd[43692]: Loading rules from directory /etc/polkit-1/rules.d
Jan 30 17:37:55 compute-0 polkitd[43692]: Loading rules from directory /usr/share/polkit-1/rules.d
Jan 30 17:37:55 compute-0 polkitd[43692]: Finished loading, compiling and executing 3 rules
Jan 30 17:37:55 compute-0 polkitd[43692]: Reloading rules
Jan 30 17:37:55 compute-0 polkitd[43692]: Collecting garbage unconditionally...
Jan 30 17:37:55 compute-0 polkitd[43692]: Loading rules from directory /etc/polkit-1/rules.d
Jan 30 17:37:55 compute-0 polkitd[43692]: Loading rules from directory /usr/share/polkit-1/rules.d
Jan 30 17:37:55 compute-0 polkitd[43692]: Finished loading, compiling and executing 3 rules

○ raid-check.service - RAID setup health check
     Loaded: loaded (/usr/lib/systemd/system/raid-check.service; static)
     Active: inactive (dead)
TriggeredBy: ○ raid-check.timer

○ rbdmap.service - Map RBD devices
     Loaded: loaded (/usr/lib/systemd/system/rbdmap.service; disabled; preset: disabled)
     Active: inactive (dead)

○ rc-local.service - /etc/rc.d/rc.local Compatibility
     Loaded: loaded (/usr/lib/systemd/system/rc-local.service; static)
     Active: inactive (dead)
       Docs: man:systemd-rc-local-generator(8)

○ rescue.service - Rescue Shell
     Loaded: loaded (/usr/lib/systemd/system/rescue.service; static)
     Active: inactive (dead)
       Docs: man:sulogin(8)

○ rpc-gssd.service - RPC security service for NFS client and server
     Loaded: loaded (/usr/lib/systemd/system/rpc-gssd.service; static)
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 16:49:49 UTC; 3h 31min ago
       Docs: man:rpc.gssd(8)

Jan 30 16:49:49 np0005602930.novalocal systemd[1]: RPC security service for NFS client and server was skipped because of an unmet condition check (ConditionPathExists=/etc/krb5.keytab).

● rpc-statd-notify.service - Notify NFS peers of a restart
     Loaded: loaded (/usr/lib/systemd/system/rpc-statd-notify.service; static)
     Active: active (exited) since Fri 2026-01-30 16:49:51 UTC; 3h 31min ago
       Docs: man:sm-notify(8)
             man:rpc.statd(8)
        CPU: 5ms

Jan 30 16:49:51 np0005602930.novalocal systemd[1]: Starting Notify NFS peers of a restart...
Jan 30 16:49:51 np0005602930.novalocal systemd[1]: Started Notify NFS peers of a restart.

○ rpc-statd.service - NFS status monitor for NFSv2/3 locking.
     Loaded: loaded (/usr/lib/systemd/system/rpc-statd.service; static)
     Active: inactive (dead)
       Docs: man:rpc.statd(8)

● rpcbind.service - RPC Bind
     Loaded: loaded (/usr/lib/systemd/system/rpcbind.service; enabled; preset: enabled)
     Active: active (running) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
TriggeredBy: ● rpcbind.socket
       Docs: man:rpcbind(8)
   Main PID: 701 (rpcbind)
         IO: 0B read, 0B written
      Tasks: 1 (limit: 100092)
     Memory: 2.5M (peak: 2.8M)
        CPU: 54ms
     CGroup: /system.slice/rpcbind.service
             └─701 /usr/bin/rpcbind -w -f

Jan 30 16:49:47 localhost systemd[1]: Starting RPC Bind...
Jan 30 16:49:47 localhost systemd[1]: Started RPC Bind.

● rsyslog.service - System Logging Service
     Loaded: loaded (/usr/lib/systemd/system/rsyslog.service; enabled; preset: enabled)
     Active: active (running) since Fri 2026-01-30 16:49:51 UTC; 3h 31min ago
Unit sntp.service could not be found.
Unit sshd-keygen.service could not be found.
       Docs: man:rsyslogd(8)
             https://www.rsyslog.com/doc/
   Main PID: 1004 (rsyslogd)
         IO: 0B read, 49.1M written
      Tasks: 3 (limit: 100092)
     Memory: 50.6M (peak: 51.2M)
        CPU: 17.102s
     CGroup: /system.slice/rsyslog.service
             └─1004 /usr/sbin/rsyslogd -n

Jan 30 20:13:23 compute-0 rsyslogd[1004]: message too long (8192) with configured size 8096, begin of message is: 2026-01-30 20:13:23.618 12 ERROR oslo_messaging.notify.messaging [-] Could not s [v8.2510.0-2.el9 try https://www.rsyslog.com/e/2445 ]
Jan 30 20:15:23 compute-0 rsyslogd[1004]: message too long (8192) with configured size 8096, begin of message is: 2026-01-30 20:15:23.474 12 ERROR oslo_messaging.notify.messaging [-] Could not s [v8.2510.0-2.el9 try https://www.rsyslog.com/e/2445 ]
Jan 30 20:15:23 compute-0 rsyslogd[1004]: message too long (8192) with configured size 8096, begin of message is: 2026-01-30 20:15:23.476 12 ERROR oslo_messaging.notify.messaging [-] Could not s [v8.2510.0-2.el9 try https://www.rsyslog.com/e/2445 ]
Jan 30 20:15:23 compute-0 rsyslogd[1004]: message too long (8192) with configured size 8096, begin of message is: 2026-01-30 20:15:23.489 12 ERROR oslo_messaging.notify.messaging [-] Could not s [v8.2510.0-2.el9 try https://www.rsyslog.com/e/2445 ]
Jan 30 20:15:23 compute-0 rsyslogd[1004]: message too long (8192) with configured size 8096, begin of message is: 2026-01-30 20:15:23.492 12 ERROR oslo_messaging.notify.messaging [-] Could not s [v8.2510.0-2.el9 try https://www.rsyslog.com/e/2445 ]
Jan 30 20:15:23 compute-0 rsyslogd[1004]: message too long (8192) with configured size 8096, begin of message is: 2026-01-30 20:15:23.526 12 ERROR oslo_messaging.notify.messaging [-] Could not s [v8.2510.0-2.el9 try https://www.rsyslog.com/e/2445 ]
Jan 30 20:15:23 compute-0 rsyslogd[1004]: message too long (8192) with configured size 8096, begin of message is: 2026-01-30 20:15:23.583 12 ERROR oslo_messaging.notify.messaging [-] Could not s [v8.2510.0-2.el9 try https://www.rsyslog.com/e/2445 ]
Jan 30 20:15:23 compute-0 rsyslogd[1004]: message too long (8192) with configured size 8096, begin of message is: 2026-01-30 20:15:23.589 12 ERROR oslo_messaging.notify.messaging [-] Could not s [v8.2510.0-2.el9 try https://www.rsyslog.com/e/2445 ]
Jan 30 20:15:23 compute-0 rsyslogd[1004]: message too long (8192) with configured size 8096, begin of message is: 2026-01-30 20:15:23.596 12 ERROR oslo_messaging.notify.messaging [-] Could not s [v8.2510.0-2.el9 try https://www.rsyslog.com/e/2445 ]
Jan 30 20:15:23 compute-0 rsyslogd[1004]: message too long (8192) with configured size 8096, begin of message is: 2026-01-30 20:15:23.599 12 ERROR oslo_messaging.notify.messaging [-] Could not s [v8.2510.0-2.el9 try https://www.rsyslog.com/e/2445 ]

○ selinux-autorelabel-mark.service - Mark the need to relabel after reboot
     Loaded: loaded (/usr/lib/systemd/system/selinux-autorelabel-mark.service; enabled; preset: enabled)
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 16:49:47 UTC; 3h 31min ago

Jan 30 16:49:47 localhost systemd[1]: Mark the need to relabel after reboot was skipped because of an unmet condition check (ConditionSecurity=!selinux).

● serial-getty@ttyS0.service - Serial Getty on ttyS0
     Loaded: loaded (/usr/lib/systemd/system/serial-getty@.service; enabled-runtime; preset: disabled)
     Active: active (running) since Fri 2026-01-30 16:49:51 UTC; 3h 31min ago
       Docs: man:agetty(8)
             man:systemd-getty-generator(8)
             http://0pointer.de/blog/projects/serial-console.html
   Main PID: 1011 (agetty)
         IO: 0B read, 0B written
      Tasks: 1 (limit: 100092)
     Memory: 300.0K (peak: 544.0K)
        CPU: 7ms
     CGroup: /system.slice/system-serial\x2dgetty.slice/serial-getty@ttyS0.service
             └─1011 /sbin/agetty -o "-p -- \\u" --keep-baud 115200,57600,38400,9600 - vt220

Jan 30 16:49:51 np0005602930.novalocal systemd[1]: Started Serial Getty on ttyS0.

○ sshd-keygen@ecdsa.service - OpenSSH ecdsa Server Key Generation
     Loaded: loaded (/usr/lib/systemd/system/sshd-keygen@.service; disabled; preset: disabled)
    Drop-In: /usr/lib/systemd/system/sshd-keygen@.service.d
             └─disable-sshd-keygen-if-cloud-init-active.conf
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 17:38:03 UTC; 2h 43min ago

Jan 30 16:49:48 localhost systemd[1]: OpenSSH ecdsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target).
Jan 30 17:38:03 compute-0 systemd[1]: OpenSSH ecdsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target).

○ sshd-keygen@ed25519.service - OpenSSH ed25519 Server Key Generation
     Loaded: loaded (/usr/lib/systemd/system/sshd-keygen@.service; disabled; preset: disabled)
    Drop-In: /usr/lib/systemd/system/sshd-keygen@.service.d
             └─disable-sshd-keygen-if-cloud-init-active.conf
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 17:38:03 UTC; 2h 43min ago

Jan 30 16:49:48 localhost systemd[1]: OpenSSH ed25519 Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target).
Jan 30 17:38:03 compute-0 systemd[1]: OpenSSH ed25519 Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target).

○ sshd-keygen@rsa.service - OpenSSH rsa Server Key Generation
     Loaded: loaded (/usr/lib/systemd/system/sshd-keygen@.service; disabled; preset: disabled)
    Drop-In: /usr/lib/systemd/system/sshd-keygen@.service.d
             └─disable-sshd-keygen-if-cloud-init-active.conf
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 17:38:03 UTC; 2h 43min ago

Jan 30 16:49:48 localhost systemd[1]: OpenSSH rsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target).
Jan 30 17:38:03 compute-0 systemd[1]: OpenSSH rsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target).

● sshd.service - OpenSSH server daemon
     Loaded: loaded (/usr/lib/systemd/system/sshd.service; enabled; preset: enabled)
     Active: active (running) since Fri 2026-01-30 17:38:03 UTC; 2h 43min ago
       Docs: man:sshd(8)
             man:sshd_config(5)
   Main PID: 129563 (sshd)
         IO: 532.0K read, 236.0K written
      Tasks: 1 (limit: 100092)
     Memory: 100.6M (peak: 106.4M)
        CPU: 9.538s
     CGroup: /system.slice/sshd.service
             └─129563 "sshd: /usr/sbin/sshd -D [listener] 0 of 10-100 startups"

Jan 30 20:20:56 compute-0 sshd-session[264277]: Accepted publickey for zuul from 192.168.122.10 port 51006 ssh2: ECDSA SHA256:Vsj85zhQ6wxGK/NQv0CKfB9EEFik+MjhKazPQpL0sM0
Jan 30 20:20:56 compute-0 sshd-session[264277]: pam_unix(sshd:session): session opened for user zuul(uid=1000) by zuul(uid=0)
Jan 30 20:21:03 compute-0 sshd-session[264532]: Connection closed by authenticating user root 160.191.243.61 port 44796 [preauth]
Jan 30 20:21:03 compute-0 sshd-session[265012]: Accepted publickey for zuul from 38.102.83.246 port 39416 ssh2: ECDSA SHA256:Vsj85zhQ6wxGK/NQv0CKfB9EEFik+MjhKazPQpL0sM0
Jan 30 20:21:03 compute-0 sshd-session[265012]: pam_unix(sshd:session): session opened for user zuul(uid=1000) by zuul(uid=0)
Jan 30 20:21:04 compute-0 sshd-session[265050]: Accepted publickey for zuul from 38.102.83.246 port 39428 ssh2: ECDSA SHA256:Vsj85zhQ6wxGK/NQv0CKfB9EEFik+MjhKazPQpL0sM0
Jan 30 20:21:04 compute-0 sshd-session[265012]: pam_unix(sshd:session): session closed for user zuul
Jan 30 20:21:04 compute-0 sshd-session[265050]: pam_unix(sshd:session): session opened for user zuul(uid=1000) by zuul(uid=0)
Unit syslog.service could not be found.
Jan 30 20:21:04 compute-0 sshd-session[265050]: pam_unix(sshd:session): session closed for user zuul
Jan 30 20:21:17 compute-0 sshd-session[267367]: Connection closed by authenticating user root 92.118.39.92 port 57606 [preauth]

○ sssd-kcm.service - SSSD Kerberos Cache Manager
     Loaded: loaded (/usr/lib/systemd/system/sssd-kcm.service; indirect; preset: disabled)
     Active: inactive (dead)
TriggeredBy: ● sssd-kcm.socket
       Docs: man:sssd-kcm(5)

○ sssd.service - System Security Services Daemon
     Loaded: loaded (/usr/lib/systemd/system/sssd.service; enabled; preset: enabled)
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 16:49:48 UTC; 3h 31min ago

Jan 30 16:49:48 localhost systemd[1]: System Security Services Daemon was skipped because no trigger condition checks were met.

○ sysstat-collect.service - system activity accounting tool
     Loaded: loaded (/usr/lib/systemd/system/sysstat-collect.service; static)
     Active: inactive (dead)
TriggeredBy: ○ sysstat-collect.timer
       Docs: man:sa1(8)

○ sysstat-summary.service - Generate a daily summary of process accounting
     Loaded: loaded (/usr/lib/systemd/system/sysstat-summary.service; static)
     Active: inactive (dead)
TriggeredBy: ○ sysstat-summary.timer
       Docs: man:sa2(8)

○ sysstat.service - Resets System Activity Logs
     Loaded: loaded (/usr/lib/systemd/system/sysstat.service; enabled; preset: enabled)
     Active: inactive (dead)

○ systemd-ask-password-console.service - Dispatch Password Requests to Console
     Loaded: loaded (/usr/lib/systemd/system/systemd-ask-password-console.service; static)
     Active: inactive (dead)
TriggeredBy: ● systemd-ask-password-console.path
       Docs: man:systemd-ask-password-console.service(8)

○ systemd-ask-password-wall.service - Forward Password Requests to Wall
     Loaded: loaded (/usr/lib/systemd/system/systemd-ask-password-wall.service; static)
     Active: inactive (dead)
TriggeredBy: ● systemd-ask-password-wall.path
       Docs: man:systemd-ask-password-wall.service(8)

○ systemd-binfmt.service - Set Up Additional Binary Formats
     Loaded: loaded (/usr/lib/systemd/system/systemd-binfmt.service; static)
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
       Docs: man:systemd-binfmt.service(8)
             man:binfmt.d(5)
             https://docs.kernel.org/admin-guide/binfmt-misc.html
             https://www.freedesktop.org/wiki/Software/systemd/APIFileSystems

Jan 30 16:49:47 localhost systemd[1]: Set Up Additional Binary Formats was skipped because no trigger condition checks were met.

○ systemd-boot-random-seed.service - Update Boot Loader Random Seed
     Loaded: loaded (/usr/lib/systemd/system/systemd-boot-random-seed.service; static)
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
       Docs: man:systemd-boot-random-seed.service(8)
             man:random(4)

Jan 30 16:49:47 localhost systemd[1]: Update Boot Loader Random Seed was skipped because no trigger condition checks were met.

● systemd-boot-update.service - Automatic Boot Loader Update
     Loaded: loaded (/usr/lib/systemd/system/systemd-boot-update.service; enabled; preset: enabled)
     Active: active (exited) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
       Docs: man:bootctl(1)
   Main PID: 697 (code=exited, status=0/SUCCESS)
        CPU: 6ms

Jan 30 16:49:47 localhost systemd[1]: Starting Automatic Boot Loader Update...
Jan 30 16:49:47 localhost bootctl[697]: Couldn't find EFI system partition, skipping.
Jan 30 16:49:47 localhost systemd[1]: Finished Automatic Boot Loader Update.

○ systemd-firstboot.service - First Boot Wizard
     Loaded: loaded (/usr/lib/systemd/system/systemd-firstboot.service; static)
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
       Docs: man:systemd-firstboot(1)

Jan 30 16:49:47 localhost systemd[1]: First Boot Wizard was skipped because of an unmet condition check (ConditionFirstBoot=yes).

○ systemd-fsck-root.service - File System Check on Root Device
     Loaded: loaded (/usr/lib/systemd/system/systemd-fsck-root.service; static)
     Active: inactive (dead) since Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
   Duration: 2.690s
       Docs: man:systemd-fsck-root.service(8)
   Main PID: 552 (code=exited, status=0/SUCCESS)
        CPU: 16ms

Jan 30 16:49:44 localhost systemd[1]: Starting File System Check on /dev/disk/by-uuid/822f14ea-6e7e-41df-b0d8-fbe282d9ded8...
Jan 30 16:49:44 localhost systemd-fsck[554]: /usr/sbin/fsck.xfs: XFS file system.
Jan 30 16:49:44 localhost systemd[1]: Finished File System Check on /dev/disk/by-uuid/822f14ea-6e7e-41df-b0d8-fbe282d9ded8.

● systemd-hostnamed.service - Hostname Service
     Loaded: loaded (/usr/lib/systemd/system/systemd-hostnamed.service; static)
    Drop-In: /usr/lib/systemd/system/systemd-hostnamed.service.d
             └─disable-privatedevices.conf
     Active: active (running) since Fri 2026-01-30 20:21:04 UTC; 14s ago
       Docs: man:systemd-hostnamed.service(8)
             man:hostname(5)
             man:machine-info(5)
             man:org.freedesktop.resolve1(5)
   Main PID: 265104 (systemd-hostnam)
         IO: 0B read, 0B written
      Tasks: 1 (limit: 100092)
     Memory: 2.7M (peak: 3.8M)
        CPU: 69ms
     CGroup: /system.slice/systemd-hostnamed.service
             └─265104 /usr/lib/systemd/systemd-hostnamed

Jan 30 20:21:04 compute-0 systemd[1]: Starting Hostname Service...
Jan 30 20:21:04 compute-0 systemd[1]: Started Hostname Service.

● systemd-hwdb-update.service - Rebuild Hardware Database
     Loaded: loaded (/usr/lib/systemd/system/systemd-hwdb-update.service; static)
     Active: active (exited) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
       Docs: man:hwdb(7)
             man:systemd-hwdb(8)
   Main PID: 689 (code=exited, status=0/SUCCESS)
        CPU: 735ms

Jan 30 16:49:47 localhost systemd[1]: Starting Rebuild Hardware Database...
Jan 30 16:49:47 localhost systemd[1]: Finished Rebuild Hardware Database.

○ systemd-initctl.service - initctl Compatibility Daemon
     Loaded: loaded (/usr/lib/systemd/system/systemd-initctl.service; static)
     Active: inactive (dead)
TriggeredBy: ● systemd-initctl.socket
       Docs: man:systemd-initctl.service(8)

● systemd-journal-catalog-update.service - Rebuild Journal Catalog
     Loaded: loaded (/usr/lib/systemd/system/systemd-journal-catalog-update.service; static)
     Active: active (exited) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
       Docs: man:systemd-journald.service(8)
             man:journald.conf(5)
   Main PID: 702 (code=exited, status=0/SUCCESS)
        CPU: 24ms

Jan 30 16:49:47 localhost systemd[1]: Starting Rebuild Journal Catalog...
Jan 30 16:49:47 localhost systemd[1]: Finished Rebuild Journal Catalog.

● systemd-journal-flush.service - Flush Journal to Persistent Storage
     Loaded: loaded (/usr/lib/systemd/system/systemd-journal-flush.service; static)
     Active: active (exited) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
       Docs: man:systemd-journald.service(8)
             man:journald.conf(5)
   Main PID: 690 (code=exited, status=0/SUCCESS)
        CPU: 9ms

Jan 30 16:49:47 localhost systemd[1]: Starting Flush Journal to Persistent Storage...
Jan 30 16:49:47 localhost systemd[1]: Finished Flush Journal to Persistent Storage.

● systemd-journald.service - Journal Service
     Loaded: loaded (/usr/lib/systemd/system/systemd-journald.service; static)
     Active: active (running) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
TriggeredBy: ● systemd-journald.socket
             ● systemd-journald-dev-log.socket
       Docs: man:systemd-journald.service(8)
             man:journald.conf(5)
   Main PID: 679 (systemd-journal)
     Status: "Processing requests..."
         IO: 0B read, 0B written
      Tasks: 1 (limit: 100092)
     Memory: 117.6M (peak: 118.1M)
        CPU: 14.315s
     CGroup: /system.slice/systemd-journald.service
             └─679 /usr/lib/systemd/systemd-journald

Jan 30 16:49:47 localhost systemd-journald[679]: Journal started
Jan 30 16:49:47 localhost systemd-journald[679]: Runtime Journal (/run/log/journal/bf0bc0bb03de29b24cba1cc9599cf5d0) is 8.0M, max 314.6M, 306.6M free.
Jan 30 16:49:46 localhost systemd[1]: systemd-journald.service: Deactivated successfully.
Jan 30 16:49:47 localhost systemd-journald[679]: Runtime Journal (/run/log/journal/bf0bc0bb03de29b24cba1cc9599cf5d0) is 8.0M, max 314.6M, 306.6M free.
Jan 30 16:49:47 localhost systemd-journald[679]: Received client request to flush runtime journal.
Jan 30 16:49:53 np0005602930.novalocal systemd-journald[679]: Time jumped backwards, rotating.

● systemd-logind.service - User Login Management
     Loaded: loaded (/usr/lib/systemd/system/systemd-logind.service; static)
    Drop-In: /usr/lib/systemd/system/systemd-logind.service.d
             └─10-grub2-logind-service.conf
     Active: active (running) since Fri 2026-01-30 16:49:48 UTC; 3h 31min ago
       Docs: man:sd-login(3)
             man:systemd-logind.service(8)
             man:logind.conf(5)
             man:org.freedesktop.login1(5)
   Main PID: 791 (systemd-logind)
     Status: "Processing requests..."
         IO: 4.0K read, 0B written
      Tasks: 1 (limit: 100092)
     Memory: 6.7M (peak: 7.4M)
        CPU: 2.913s
     CGroup: /system.slice/systemd-logind.service
             └─791 /usr/lib/systemd/systemd-logind

Jan 30 20:18:44 compute-0 systemd-logind[791]: Removed session 156.
Jan 30 20:18:44 compute-0 systemd-logind[791]: Session 157 logged out. Waiting for processes to exit.
Jan 30 20:18:44 compute-0 systemd-logind[791]: Removed session 157.
Jan 30 20:20:56 compute-0 systemd-logind[791]: New session 158 of user zuul.
Jan 30 20:21:03 compute-0 systemd-logind[791]: New session 159 of user zuul.
Jan 30 20:21:04 compute-0 systemd-logind[791]: New session 160 of user zuul.
Jan 30 20:21:04 compute-0 systemd-logind[791]: Session 159 logged out. Waiting for processes to exit.
Jan 30 20:21:04 compute-0 systemd-logind[791]: Removed session 159.
Jan 30 20:21:04 compute-0 systemd-logind[791]: Session 160 logged out. Waiting for processes to exit.
Jan 30 20:21:04 compute-0 systemd-logind[791]: Removed session 160.

○ systemd-machine-id-commit.service - Commit a transient machine-id on disk
     Loaded: loaded (/usr/lib/systemd/system/systemd-machine-id-commit.service; static)
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
       Docs: man:systemd-machine-id-commit.service(8)

Jan 30 16:49:47 localhost systemd[1]: Commit a transient machine-id on disk was skipped because of an unmet condition check (ConditionPathIsMountPoint=/etc/machine-id).

● systemd-machined.service - Virtual Machine and Container Registration Service
     Loaded: loaded (/usr/lib/systemd/system/systemd-machined.service; static)
     Active: active (running) since Fri 2026-01-30 17:39:33 UTC; 2h 41min ago
       Docs: man:systemd-machined.service(8)
             man:org.freedesktop.machine1(5)
   Main PID: 154436 (systemd-machine)
     Status: "Processing requests..."
         IO: 0B read, 0B written
      Tasks: 1 (limit: 100092)
     Memory: 1.5M (peak: 2.1M)
        CPU: 1.895s
     CGroup: /system.slice/systemd-machined.service
             └─154436 /usr/lib/systemd/systemd-machined

Jan 30 19:35:03 compute-0 systemd-machined[154436]: New machine qemu-31-instance-00000039.
Jan 30 19:40:28 compute-0 systemd-machined[154436]: Machine qemu-31-instance-00000039 terminated.
Jan 30 19:41:45 compute-0 systemd-machined[154436]: New machine qemu-32-instance-0000003c.
Jan 30 19:47:12 compute-0 systemd-machined[154436]: Machine qemu-32-instance-0000003c terminated.
Jan 30 19:48:11 compute-0 systemd-machined[154436]: New machine qemu-33-instance-0000003f.
Jan 30 19:53:43 compute-0 systemd-machined[154436]: Machine qemu-33-instance-0000003f terminated.
Jan 30 19:56:31 compute-0 systemd-machined[154436]: New machine qemu-34-instance-00000042.
Jan 30 20:05:06 compute-0 systemd-machined[154436]: Machine qemu-34-instance-00000042 terminated.
Unit systemd-networkd-wait-online.service could not be found.
Jan 30 20:07:06 compute-0 systemd-machined[154436]: New machine qemu-35-instance-00000045.
Jan 30 20:15:29 compute-0 systemd-machined[154436]: Machine qemu-35-instance-00000045 terminated.

● systemd-modules-load.service - Load Kernel Modules
     Loaded: loaded (/usr/lib/systemd/system/systemd-modules-load.service; static)
     Active: active (exited) since Fri 2026-01-30 17:41:01 UTC; 2h 40min ago
       Docs: man:systemd-modules-load.service(8)
             man:modules-load.d(5)
   Main PID: 167165 (code=exited, status=0/SUCCESS)
        CPU: 6ms

Jan 30 17:41:01 compute-0 systemd[1]: Starting Load Kernel Modules...
Jan 30 17:41:01 compute-0 systemd[1]: Finished Load Kernel Modules.

● systemd-network-generator.service - Generate network units from Kernel command line
     Loaded: loaded (/usr/lib/systemd/system/systemd-network-generator.service; enabled; preset: enabled)
     Active: active (exited) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
       Docs: man:systemd-network-generator.service(8)
   Main PID: 680 (code=exited, status=0/SUCCESS)
        CPU: 10ms

Jan 30 16:49:47 localhost systemd[1]: Finished Generate network units from Kernel command line.

○ systemd-pcrmachine.service - TPM2 PCR Machine ID Measurement
     Loaded: loaded (/usr/lib/systemd/system/systemd-pcrmachine.service; static)
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
       Docs: man:systemd-pcrmachine.service(8)

○ systemd-pcrphase-initrd.service - TPM2 PCR Barrier (initrd)
     Loaded: loaded (/usr/lib/systemd/system/systemd-pcrphase-initrd.service; static)
     Active: inactive (dead)
       Docs: man:systemd-pcrphase-initrd.service(8)

○ systemd-pcrphase-sysinit.service - TPM2 PCR Barrier (Initialization)
     Loaded: loaded (/usr/lib/systemd/system/systemd-pcrphase-sysinit.service; static)
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
       Docs: man:systemd-pcrphase-sysinit.service(8)

Jan 30 16:49:47 localhost systemd[1]: TPM2 PCR Barrier (Initialization) was skipped because of an unmet condition check (ConditionPathExists=/sys/firmware/efi/efivars/StubPcrKernelImage-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f).

○ systemd-pcrphase.service - TPM2 PCR Barrier (User)
     Loaded: loaded (/usr/lib/systemd/system/systemd-pcrphase.service; static)
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 16:49:49 UTC; 3h 31min ago
       Docs: man:systemd-pcrphase.service(8)

Jan 30 16:49:49 np0005602930.novalocal systemd[1]: TPM2 PCR Barrier (User) was skipped because of an unmet condition check (ConditionPathExists=/sys/firmware/efi/efivars/StubPcrKernelImage-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f).

○ systemd-pstore.service - Platform Persistent Storage Archival
     Loaded: loaded (/usr/lib/systemd/system/systemd-pstore.service; enabled; preset: enabled)
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
       Docs: man:systemd-pstore(8)

Jan 30 16:49:47 localhost systemd[1]: Platform Persistent Storage Archival was skipped because of an unmet condition check (ConditionDirectoryNotEmpty=/sys/fs/pstore).

● systemd-random-seed.service - Load/Save OS Random Seed
     Loaded: loaded (/usr/lib/systemd/system/systemd-random-seed.service; static)
     Active: active (exited) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
       Docs: man:systemd-random-seed.service(8)
             man:random(4)
   Main PID: 691 (code=exited, status=0/SUCCESS)
        CPU: 6ms

Jan 30 16:49:47 localhost systemd[1]: Starting Load/Save OS Random Seed...
Jan 30 16:49:47 localhost systemd[1]: Finished Load/Save OS Random Seed.

● systemd-remount-fs.service - Remount Root and Kernel File Systems
     Loaded: loaded (/usr/lib/systemd/system/systemd-remount-fs.service; enabled-runtime; preset: disabled)
     Active: active (exited) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
Unit systemd-timesyncd.service could not be found.
Unit systemd-tmpfiles.service could not be found.
       Docs: man:systemd-remount-fs.service(8)
             https://www.freedesktop.org/wiki/Software/systemd/APIFileSystems
   Main PID: 681 (code=exited, status=0/SUCCESS)
        CPU: 15ms

Jan 30 16:49:47 localhost systemd[1]: Finished Remount Root and Kernel File Systems.

○ systemd-repart.service - Repartition Root Disk
     Loaded: loaded (/usr/lib/systemd/system/systemd-repart.service; static)
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
       Docs: man:systemd-repart.service(8)

○ systemd-rfkill.service - Load/Save RF Kill Switch Status
     Loaded: loaded (/usr/lib/systemd/system/systemd-rfkill.service; static)
     Active: inactive (dead)
TriggeredBy: ● systemd-rfkill.socket
       Docs: man:systemd-rfkill.service(8)

● systemd-sysctl.service - Apply Kernel Variables
     Loaded: loaded (/usr/lib/systemd/system/systemd-sysctl.service; static)
     Active: active (exited) since Fri 2026-01-30 17:26:33 UTC; 2h 54min ago
       Docs: man:systemd-sysctl.service(8)
             man:sysctl.d(5)
   Main PID: 45177 (code=exited, status=0/SUCCESS)
        CPU: 23ms

Jan 30 17:26:32 compute-0 systemd[1]: Starting Apply Kernel Variables...
Jan 30 17:26:33 compute-0 systemd[1]: Finished Apply Kernel Variables.

○ systemd-sysext.service - Merge System Extension Images into /usr/ and /opt/
     Loaded: loaded (/usr/lib/systemd/system/systemd-sysext.service; disabled; preset: disabled)
     Active: inactive (dead)
       Docs: man:systemd-sysext.service(8)

● systemd-sysusers.service - Create System Users
     Loaded: loaded (/usr/lib/systemd/system/systemd-sysusers.service; static)
     Active: active (exited) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
       Docs: man:sysusers.d(5)
             man:systemd-sysusers.service(8)
   Main PID: 692 (code=exited, status=0/SUCCESS)
        CPU: 18ms

Jan 30 16:49:47 localhost systemd[1]: Starting Create System Users...
Jan 30 16:49:47 localhost systemd[1]: Finished Create System Users.

○ systemd-tmpfiles-clean.service - Cleanup of Temporary Directories
     Loaded: loaded (/usr/lib/systemd/system/systemd-tmpfiles-clean.service; static)
     Active: inactive (dead) since Fri 2026-01-30 17:05:10 UTC; 3h 16min ago
TriggeredBy: ● systemd-tmpfiles-clean.timer
       Docs: man:tmpfiles.d(5)
             man:systemd-tmpfiles(8)
   Main PID: 30009 (code=exited, status=0/SUCCESS)
        CPU: 51ms

Jan 30 17:05:10 compute-0 systemd[1]: Starting Cleanup of Temporary Directories...
Jan 30 17:05:10 compute-0 systemd[1]: systemd-tmpfiles-clean.service: Deactivated successfully.
Jan 30 17:05:10 compute-0 systemd[1]: Finished Cleanup of Temporary Directories.

● systemd-tmpfiles-setup-dev.service - Create Static Device Nodes in /dev
     Loaded: loaded (/usr/lib/systemd/system/systemd-tmpfiles-setup-dev.service; static)
     Active: active (exited) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
       Docs: man:tmpfiles.d(5)
             man:systemd-tmpfiles(8)
   Main PID: 694 (code=exited, status=0/SUCCESS)
        CPU: 33ms

Jan 30 16:49:47 localhost systemd[1]: Starting Create Static Device Nodes in /dev...
Jan 30 16:49:47 localhost systemd[1]: Finished Create Static Device Nodes in /dev.

● systemd-tmpfiles-setup.service - Create Volatile Files and Directories
     Loaded: loaded (/usr/lib/systemd/system/systemd-tmpfiles-setup.service; static)
     Active: active (exited) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
       Docs: man:tmpfiles.d(5)
             man:systemd-tmpfiles(8)
   Main PID: 698 (code=exited, status=0/SUCCESS)
        CPU: 102ms

Jan 30 16:49:47 localhost systemd[1]: Starting Create Volatile Files and Directories...
Jan 30 16:49:47 localhost systemd[1]: Finished Create Volatile Files and Directories.

● systemd-udev-settle.service - Wait for udev To Complete Device Initialization
     Loaded: loaded (/usr/lib/systemd/system/systemd-udev-settle.service; static)
     Active: active (exited) since Fri 2026-01-30 17:40:56 UTC; 2h 40min ago
       Docs: man:systemd-udev-settle.service(8)
   Main PID: 166259 (code=exited, status=0/SUCCESS)
        CPU: 13ms

Jan 30 17:40:56 compute-0 systemd[1]: Starting Wait for udev To Complete Device Initialization...
Jan 30 17:40:56 compute-0 udevadm[166259]: systemd-udev-settle.service is deprecated. Please fix multipathd.service not to pull it in.
Jan 30 17:40:56 compute-0 systemd[1]: Finished Wait for udev To Complete Device Initialization.

● systemd-udev-trigger.service - Coldplug All udev Devices
     Loaded: loaded (/usr/lib/systemd/system/systemd-udev-trigger.service; static)
    Drop-In: /usr/lib/systemd/system/systemd-udev-trigger.service.d
             └─systemd-udev-trigger-no-reload.conf
     Active: active (exited) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
       Docs: man:udev(7)
             man:systemd-udevd.service(8)
   Main PID: 685 (code=exited, status=0/SUCCESS)
        CPU: 77ms

Jan 30 16:49:47 localhost systemd[1]: Finished Coldplug All udev Devices.

● systemd-udevd.service - Rule-based Manager for Device Events and Files
     Loaded: loaded (/usr/lib/systemd/system/systemd-udevd.service; static)
     Active: active (running) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
TriggeredBy: ● systemd-udevd-control.socket
             ● systemd-udevd-kernel.socket
       Docs: man:systemd-udevd.service(8)
             man:udev(7)
   Main PID: 731 (systemd-udevd)
     Status: "Processing with 32 children at max"
         IO: 2.2M read, 0B written
      Tasks: 1
     Memory: 21.5M (peak: 100.8M)
        CPU: 7.808s
     CGroup: /system.slice/systemd-udevd.service
             └─udev
               └─731 /usr/lib/systemd/systemd-udevd

Jan 30 19:28:14 compute-0 systemd-udevd[247487]: Network interface NamePolicy= disabled on kernel command line.
Jan 30 19:29:12 compute-0 systemd-udevd[248107]: Network interface NamePolicy= disabled on kernel command line.
Jan 30 19:30:05 compute-0 systemd-udevd[248657]: Network interface NamePolicy= disabled on kernel command line.
Jan 30 19:35:03 compute-0 systemd-udevd[250686]: Network interface NamePolicy= disabled on kernel command line.
Jan 30 19:41:45 compute-0 systemd-udevd[252773]: Network interface NamePolicy= disabled on kernel command line.
Jan 30 19:41:45 compute-0 systemd-udevd[252779]: Network interface NamePolicy= disabled on kernel command line.
Jan 30 19:48:11 compute-0 systemd-udevd[254697]: Network interface NamePolicy= disabled on kernel command line.
Jan 30 19:48:11 compute-0 systemd-udevd[254700]: Network interface NamePolicy= disabled on kernel command line.
Jan 30 19:56:31 compute-0 systemd-udevd[257230]: Network interface NamePolicy= disabled on kernel command line.
Jan 30 20:07:06 compute-0 systemd-udevd[260266]: Network interface NamePolicy= disabled on kernel command line.

● systemd-update-done.service - Update is Completed
     Loaded: loaded (/usr/lib/systemd/system/systemd-update-done.service; static)
     Active: active (exited) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
       Docs: man:systemd-update-done.service(8)
   Main PID: 732 (code=exited, status=0/SUCCESS)
        CPU: 8ms

Jan 30 16:49:47 localhost systemd[1]: Starting Update is Completed...
Jan 30 16:49:47 localhost systemd[1]: Finished Update is Completed.

○ systemd-update-utmp-runlevel.service - Record Runlevel Change in UTMP
     Loaded: loaded (/usr/lib/systemd/system/systemd-update-utmp-runlevel.service; static)
     Active: inactive (dead) since Fri 2026-01-30 16:49:51 UTC; 3h 31min ago
       Docs: man:systemd-update-utmp-runlevel.service(8)
             man:utmp(5)
   Main PID: 1016 (code=exited, status=0/SUCCESS)
        CPU: 5ms

Jan 30 16:49:51 np0005602930.novalocal systemd[1]: Starting Record Runlevel Change in UTMP...
Jan 30 16:49:51 np0005602930.novalocal systemd[1]: systemd-update-utmp-runlevel.service: Deactivated successfully.
Jan 30 16:49:51 np0005602930.novalocal systemd[1]: Finished Record Runlevel Change in UTMP.

● systemd-update-utmp.service - Record System Boot/Shutdown in UTMP
     Loaded: loaded (/usr/lib/systemd/system/systemd-update-utmp.service; static)
Unit tlp.service could not be found.
     Active: active (exited) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
       Docs: man:systemd-update-utmp.service(8)
             man:utmp(5)
   Main PID: 730 (code=exited, status=0/SUCCESS)
        CPU: 5ms

Jan 30 16:49:47 localhost systemd[1]: Starting Record System Boot/Shutdown in UTMP...
Jan 30 16:49:47 localhost systemd[1]: Finished Record System Boot/Shutdown in UTMP.

● systemd-user-sessions.service - Permit User Sessions
     Loaded: loaded (/usr/lib/systemd/system/systemd-user-sessions.service; static)
     Active: active (exited) since Fri 2026-01-30 16:49:51 UTC; 3h 31min ago
       Docs: man:systemd-user-sessions.service(8)
   Main PID: 1006 (code=exited, status=0/SUCCESS)
        CPU: 6ms

Jan 30 16:49:51 np0005602930.novalocal systemd[1]: Starting Permit User Sessions...
Jan 30 16:49:51 np0005602930.novalocal systemd[1]: Finished Permit User Sessions.

○ systemd-vconsole-setup.service - Setup Virtual Console
     Loaded: loaded (/usr/lib/systemd/system/systemd-vconsole-setup.service; static)
     Active: inactive (dead) since Fri 2026-01-30 16:49:45 UTC; 3h 31min ago
   Duration: 1.719s
       Docs: man:systemd-vconsole-setup.service(8)
             man:vconsole.conf(5)
   Main PID: 317 (code=exited, status=0/SUCCESS)
        CPU: 197ms

Jan 30 16:49:43 localhost systemd[1]: Finished Setup Virtual Console.
Jan 30 16:49:45 localhost systemd[1]: systemd-vconsole-setup.service: Deactivated successfully.
Jan 30 16:49:45 localhost systemd[1]: Stopped Setup Virtual Console.
Notice: journal has been rotated since unit was started, output may be incomplete.

● tuned.service - Dynamic System Tuning Daemon
     Loaded: loaded (/usr/lib/systemd/system/tuned.service; enabled; preset: enabled)
     Active: active (running) since Fri 2026-01-30 17:26:23 UTC; 2h 54min ago
       Docs: man:tuned(8)
             man:tuned.conf(5)
             man:tuned-adm(8)
   Main PID: 43869 (tuned)
         IO: 0B read, 0B written
      Tasks: 4 (limit: 100092)
     Memory: 14.0M (peak: 15.9M)
        CPU: 2.875s
     CGroup: /system.slice/tuned.service
             └─43869 /usr/bin/python3 -Es /usr/sbin/tuned -l -P

Jan 30 17:26:23 compute-0 systemd[1]: Starting Dynamic System Tuning Daemon...
Jan 30 17:26:23 compute-0 systemd[1]: Started Dynamic System Tuning Daemon.

○ unbound-anchor.service - update of the root trust anchor for DNSSEC validation in unbound
     Loaded: loaded (/usr/lib/systemd/system/unbound-anchor.service; static)
     Active: inactive (dead)
TriggeredBy: ● unbound-anchor.timer
       Docs: man:unbound-anchor(8)

● user-runtime-dir@1000.service - User Runtime Directory /run/user/1000
     Loaded: loaded (/usr/lib/systemd/system/user-runtime-dir@.service; static)
     Active: active (exited) since Fri 2026-01-30 16:50:03 UTC; 3h 31min ago
       Docs: man:user@.service(5)
   Main PID: 4306 (code=exited, status=0/SUCCESS)
        CPU: 10ms

Jan 30 16:50:03 np0005602930.novalocal systemd[1]: Starting User Runtime Directory /run/user/1000...
Jan 30 16:50:03 np0005602930.novalocal systemd[1]: Finished User Runtime Directory /run/user/1000.

● user@1000.service - User Manager for UID 1000
     Loaded: loaded (/usr/lib/systemd/system/user@.service; static)
    Drop-In: /usr/lib/systemd/system/user@.service.d
             └─10-login-barrier.conf
     Active: active (running) since Fri 2026-01-30 16:50:03 UTC; 3h 31min ago
       Docs: man:user@.service(5)
   Main PID: 4307 (systemd)
     Status: "Ready."
         IO: 676.0K read, 4.0K written
      Tasks: 5
     Memory: 9.2M (peak: 13.0M)
        CPU: 1.259s
     CGroup: /user.slice/user-1000.slice/user@1000.service
             ├─app.slice
             │ └─dbus-broker.service
             │   ├─15953 /usr/bin/dbus-broker-launch --scope user
             │   └─15968 dbus-broker --log 4 --controller 9 --machine-id bf0bc0bb03de29b24cba1cc9599cf5d0 --max-bytes 100000000000000 --max-fds 25000000000000 --max-matches 5000000000
             ├─init.scope
             │ ├─4307 /usr/lib/systemd/systemd --user
             │ └─4309 "(sd-pam)"
             └─user.slice
               └─podman-pause-bff822aa.scope
                 └─15896 catatonit -P

Jan 30 17:02:38 np0005602930.novalocal dbus-broker-launch[15953]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored
Jan 30 17:02:38 np0005602930.novalocal dbus-broker-launch[15953]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored
Jan 30 17:02:38 np0005602930.novalocal systemd[4307]: Started D-Bus User Message Bus.
Jan 30 17:02:38 np0005602930.novalocal dbus-broker-lau[15953]: Ready
Jan 30 17:02:38 np0005602930.novalocal systemd[4307]: selinux: avc:  op=load_policy lsm=selinux seqno=4 res=1
Jan 30 17:02:38 np0005602930.novalocal systemd[4307]: Created slice Slice /user.
Jan 30 17:02:38 np0005602930.novalocal systemd[4307]: podman-15886.scope: unit configures an IP firewall, but not running as root.
Jan 30 17:02:38 np0005602930.novalocal systemd[4307]: (This warning is only shown for the first unit using IP firewalling.)
Jan 30 17:02:38 np0005602930.novalocal systemd[4307]: Started podman-15886.scope.
Jan 30 17:02:39 np0005602930.novalocal systemd[4307]: Started podman-pause-bff822aa.scope.

○ virtinterfaced.service - libvirt interface daemon
     Loaded: loaded (/usr/lib/systemd/system/virtinterfaced.service; disabled; preset: disabled)
     Active: inactive (dead)
TriggeredBy: ○ virtinterfaced-admin.socket
             ○ virtinterfaced.socket
             ○ virtinterfaced-ro.socket
       Docs: man:virtinterfaced(8)
             https://libvirt.org/

○ virtlockd.service - libvirt locking daemon
     Loaded: loaded (/usr/lib/systemd/system/virtlockd.service; disabled; preset: disabled)
     Active: inactive (dead)
TriggeredBy: ● virtlockd.socket
             ○ virtlockd-admin.socket
       Docs: man:virtlockd(8)
             https://libvirt.org/

● virtlogd.service - libvirt logging daemon
     Loaded: loaded (/usr/lib/systemd/system/virtlogd.service; enabled; preset: disabled)
     Active: active (running) since Fri 2026-01-30 17:39:30 UTC; 2h 41min ago
TriggeredBy: ● virtlogd.socket
             ● virtlogd-admin.socket
       Docs: man:virtlogd(8)
             https://libvirt.org/
   Main PID: 153807 (virtlogd)
         IO: 644.0K read, 127.3M written
      Tasks: 1 (limit: 100092)
     Memory: 18.3M (peak: 22.9M)
        CPU: 30min 19.607s
     CGroup: /system.slice/virtlogd.service
             └─153807 /usr/sbin/virtlogd

Jan 30 17:39:30 compute-0 systemd[1]: Starting libvirt logging daemon...
Jan 30 17:39:30 compute-0 systemd[1]: Started libvirt logging daemon.

○ virtnetworkd.service - libvirt network daemon
     Loaded: loaded (/usr/lib/systemd/system/virtnetworkd.service; disabled; preset: disabled)
     Active: inactive (dead)
TriggeredBy: ○ virtnetworkd-ro.socket
             ○ virtnetworkd.socket
             ○ virtnetworkd-admin.socket
       Docs: man:virtnetworkd(8)
             https://libvirt.org/

● virtnodedevd.service - libvirt nodedev daemon
     Loaded: loaded (/usr/lib/systemd/system/virtnodedevd.service; enabled; preset: disabled)
     Active: active (running) since Fri 2026-01-30 17:42:20 UTC; 2h 38min ago
TriggeredBy: ● virtnodedevd-ro.socket
             ● virtnodedevd.socket
             ● virtnodedevd-admin.socket
       Docs: man:virtnodedevd(8)
             https://libvirt.org/
   Main PID: 183033 (virtnodedevd)
         IO: 0B read, 0B written
      Tasks: 20 (limit: 100092)
     Memory: 5.8M (peak: 7.3M)
        CPU: 7.439s
     CGroup: /system.slice/virtnodedevd.service
             └─183033 /usr/sbin/virtnodedevd --timeout 120

Jan 30 18:02:55 compute-0 virtnodedevd[183033]: libvirt version: 11.10.0, package: 2.el9 (builder@centos.org, 2025-12-18-15:09:54, )
Jan 30 18:02:55 compute-0 virtnodedevd[183033]: hostname: compute-0
Jan 30 18:02:55 compute-0 virtnodedevd[183033]: ethtool ioctl error on tap4c9ab7f1-62: No such device
Jan 30 18:02:55 compute-0 virtnodedevd[183033]: ethtool ioctl error on tap4c9ab7f1-62: No such device
Jan 30 18:02:55 compute-0 virtnodedevd[183033]: ethtool ioctl error on tap4c9ab7f1-62: No such device
Jan 30 18:02:55 compute-0 virtnodedevd[183033]: ethtool ioctl error on tap4c9ab7f1-62: No such device
Jan 30 18:02:56 compute-0 virtnodedevd[183033]: ethtool ioctl error on tap4c9ab7f1-62: No such device
Jan 30 18:02:56 compute-0 virtnodedevd[183033]: ethtool ioctl error on tap4c9ab7f1-62: No such device
Jan 30 18:02:56 compute-0 virtnodedevd[183033]: ethtool ioctl error on tap4c9ab7f1-62: No such device
Jan 30 18:02:56 compute-0 virtnodedevd[183033]: ethtool ioctl error on tap4c9ab7f1-62: No such device

○ virtnwfilterd.service - libvirt nwfilter daemon
     Loaded: loaded (/usr/lib/systemd/system/virtnwfilterd.service; disabled; preset: disabled)
     Active: inactive (dead)
TriggeredBy: ○ virtnwfilterd-ro.socket
             ○ virtnwfilterd.socket
             ○ virtnwfilterd-admin.socket
       Docs: man:virtnwfilterd(8)
             https://libvirt.org/

○ virtproxyd.service - libvirt proxy daemon
     Loaded: loaded (/usr/lib/systemd/system/virtproxyd.service; enabled; preset: disabled)
     Active: inactive (dead) since Fri 2026-01-30 19:26:27 UTC; 54min ago
   Duration: 2min 1.636s
TriggeredBy: ● virtproxyd-ro.socket
             ● virtproxyd-admin.socket
             ● virtproxyd-tls.socket
             ● virtproxyd.socket
       Docs: man:virtproxyd(8)
             https://libvirt.org/
    Process: 245222 ExecStart=/usr/sbin/virtproxyd $VIRTPROXYD_ARGS (code=exited, status=0/SUCCESS)
   Main PID: 245222 (code=exited, status=0/SUCCESS)
        CPU: 68ms

Jan 30 19:24:25 compute-0 systemd[1]: Starting libvirt proxy daemon...
Jan 30 19:24:25 compute-0 systemd[1]: Started libvirt proxy daemon.
Jan 30 19:26:27 compute-0 systemd[1]: virtproxyd.service: Deactivated successfully.

● virtqemud.service - libvirt QEMU daemon
     Loaded: loaded (/usr/lib/systemd/system/virtqemud.service; enabled; preset: enabled)
     Active: active (running) since Fri 2026-01-30 17:42:19 UTC; 2h 39min ago
TriggeredBy: ● virtqemud-ro.socket
             ● virtqemud.socket
             ● virtqemud-admin.socket
       Docs: man:virtqemud(8)
             https://libvirt.org/
   Main PID: 182740 (virtqemud)
         IO: 1.1M read, 1.2M written
      Tasks: 21 (limit: 32768)
     Memory: 25.6M (peak: 46.0M)
        CPU: 23.929s
     CGroup: /system.slice/virtqemud.service
             └─182740 /usr/sbin/virtqemud --timeout 120

Jan 30 17:42:19 compute-0 systemd[1]: Started libvirt QEMU daemon.
Jan 30 17:42:21 compute-0 virtqemud[182740]: libvirt version: 11.10.0, package: 2.el9 (builder@centos.org, 2025-12-18-15:09:54, )
Jan 30 17:42:21 compute-0 virtqemud[182740]: hostname: compute-0
Jan 30 17:42:21 compute-0 virtqemud[182740]: End of file while reading data: Input/output error
Jan 30 19:21:14 compute-0 virtqemud[182740]: Domain id=24 name='instance-00000030' uuid=80e17086-10d6-4f5d-95b2-6fecb811e5e4 is tainted: custom-monitor
Jan 30 19:24:34 compute-0 virtqemud[182740]: Domain id=26 name='instance-00000031' uuid=c4391d6d-45bd-419f-8e51-75550c13204f is tainted: custom-monitor
Jan 30 20:21:01 compute-0 virtqemud[182740]: Failed to connect socket to '/var/run/libvirt/virtnetworkd-sock-ro': No such file or directory
Jan 30 20:21:01 compute-0 virtqemud[182740]: Failed to connect socket to '/var/run/libvirt/virtnwfilterd-sock-ro': No such file or directory
Jan 30 20:21:01 compute-0 virtqemud[182740]: Failed to connect socket to '/var/run/libvirt/virtstoraged-sock-ro': No such file or directory
Jan 30 20:21:18 compute-0 virtqemud[182740]: Failed to connect socket to '/var/run/libvirt/virtstoraged-sock-ro': No such file or directory

○ virtsecretd.service - libvirt secret daemon
     Loaded: loaded (/usr/lib/systemd/system/virtsecretd.service; enabled; preset: disabled)
     Active: inactive (dead) since Fri 2026-01-30 17:41:34 UTC; 2h 39min ago
   Duration: 2min 29ms
TriggeredBy: ● virtsecretd-admin.socket
             ● virtsecretd.socket
             ● virtsecretd-ro.socket
Unit ypbind.service could not be found.
Unit yppasswdd.service could not be found.
Unit ypserv.service could not be found.
Unit ypxfrd.service could not be found.
       Docs: man:virtsecretd(8)
             https://libvirt.org/
   Main PID: 154655 (code=exited, status=0/SUCCESS)
        CPU: 34ms

Jan 30 17:39:34 compute-0 systemd[1]: Starting libvirt secret daemon...
Jan 30 17:39:34 compute-0 systemd[1]: Started libvirt secret daemon.
Jan 30 17:41:34 compute-0 systemd[1]: virtsecretd.service: Deactivated successfully.

○ virtstoraged.service - libvirt storage daemon
     Loaded: loaded (/usr/lib/systemd/system/virtstoraged.service; disabled; preset: disabled)
     Active: inactive (dead)
TriggeredBy: ○ virtstoraged.socket
             ○ virtstoraged-admin.socket
             ○ virtstoraged-ro.socket
       Docs: man:virtstoraged(8)
             https://libvirt.org/

● -.slice - Root Slice
     Loaded: loaded
     Active: active since Fri 2026-01-30 16:49:42 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:42 UTC; 3h 31min ago
       Docs: man:systemd.special(7)
      Tasks: 546
     Memory: 5.1G
        CPU: 12h 46min 3.810s
     CGroup: /
             ├─267250 turbostat --debug sleep 10
             ├─267256 sleep 10
             ├─init.scope
             │ └─1 /usr/lib/systemd/systemd --switched-root --system --deserialize 31
             ├─machine.slice
             │ ├─libpod-573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b.scope
             │ │ └─container
             │ │   ├─104655 dumb-init --single-child -- kolla_start
             │ │   ├─104658 "neutron-ovn-metadata-agent (/usr/bin/python3 /usr/bin/neutron-ovn-metadata-agent)"
             │ │   ├─104880 "neutron-ovn-metadata-agent (/usr/bin/python3 /usr/bin/neutron-ovn-metadata-agent)"
             │ │   ├─104920 /usr/bin/python3 /bin/privsep-helper --config-file /etc/neutron/neutron.conf --config-dir /etc/neutron.conf.d --privsep_context neutron.privileged.namespace_cmd --privsep_sock_path /tmp/tmpp73u3fin/privsep.sock
             │ │   ├─212547 /usr/bin/python3 /bin/privsep-helper --config-file /etc/neutron/neutron.conf --config-dir /etc/neutron.conf.d --privsep_context neutron.privileged.default --privsep_sock_path /tmp/tmpr5q8om25/privsep.sock
             │ │   └─212743 /usr/bin/python3 /bin/privsep-helper --config-file /etc/neutron/neutron.conf --config-dir /etc/neutron.conf.d --privsep_context neutron.privileged.link_cmd --privsep_sock_path /tmp/tmp3la6aon7/privsep.sock
             │ ├─libpod-60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5.scope
             │ │ └─container
             │ │   ├─192801 dumb-init --single-child -- kolla_start
             │ │   ├─192804 "ceilometer-polling: master process [/usr/bin/ceilometer-polling --polling-namespaces compute --logfile /dev/stdout]"
             │ │   └─192931 "ceilometer-polling: AgentManager worker(0)"
             │ ├─libpod-60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97.scope
             │ │ └─container
             │ │   ├─95418 dumb-init --single-child -- kolla_start
             │ │   └─95421 /usr/bin/ovn-controller --pidfile unix:/run/openvswitch/db.sock -p /etc/pki/tls/private/ovndb.key -c /etc/pki/tls/certs/ovndb.crt -C /etc/pki/tls/certs/ovndbca.crt
             │ ├─libpod-8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a.scope
             │ │ └─container
             │ │   └─202050 /app/openstack-network-exporter
             │ ├─libpod-925ff7e3b4039e4c5d83bde5f3c68c16c1a695e578352baba5cb792427c3386e.scope
             │ │ └─container
             │ │   ├─183117 dumb-init --single-child -- kolla_start
             │ │   ├─183119 /usr/bin/python3 /usr/bin/nova-compute
             │ │   ├─212621 /usr/bin/python3 /bin/privsep-helper --config-file /etc/nova/nova.conf --config-file /etc/nova/nova-compute.conf --config-dir /etc/nova/nova.conf.d --privsep_context nova.privsep.sys_admin_pctxt --privsep_sock_path /tmp/tmp5s1ioym_/privsep.sock
             │ │   └─212642 /usr/bin/python3 /bin/privsep-helper --config-file /etc/nova/nova.conf --config-file /etc/nova/nova-compute.conf --config-dir /etc/nova/nova.conf.d --privsep_context vif_plug_ovs.privsep.vif_plug --privsep_sock_path /tmp/tmpmo51pq8n/privsep.sock
             │ ├─libpod-ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735.scope
             │ │ └─container
             │ │   └─198927 /bin/podman_exporter --web.config.file=/etc/podman_exporter/podman_exporter.yaml
             │ ├─libpod-e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0.scope
             │ │ └─container
             │ │   └─195798 /bin/node_exporter --web.config.file=/etc/node_exporter/node_exporter.yaml --web.disable-exporter-metrics --collector.systemd "--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service" --no-collector.dmi --no-collector.entropy --no-collector.thermal_zone --no-collector.time --no-collector.timex --no-collector.uname --no-collector.stat --no-collector.hwmon --no-collector.os --no-collector.selinux --no-collector.textfile --no-collector.powersupplyclass --no-collector.pressure --no-collector.rapl
             │ ├─machine-qemu\x2d10\x2dinstance\x2d0000000f.scope
             │ │ └─libvirt
             │ │   └─216930 /usr/libexec/qemu-kvm -name guest=instance-0000000f,debug-threads=on -S -object "{\"qom-type\":\"secret\",\"id\":\"masterKey0\",\"format\":\"raw\",\"file\":\"/var/lib/libvirt/qemu/domain-10-instance-0000000f/master-key.aes\"}" -machine pc-q35-rhel9.8.0,usb=off,dump-guest-core=off,memory-backend=pc.ram,hpet=off,acpi=on -accel kvm -cpu EPYC-Rome,x2apic=on,tsc-deadline=on,hypervisor=on,tsc-adjust=on,spec-ctrl=on,stibp=on,ssbd=on,cmp-legacy=on,overflow-recov=on,succor=on,ibrs=on,amd-ssbd=on,virt-ssbd=on,lbrv=on,tsc-scale=on,vmcb-clean=on,flushbyasid=on,pause-filter=on,pfthreshold=on,svme-addr-chk=on,lfence-always-serializing=on,xsaves=off -m size=1048576k -object "{\"qom-type\":\"memory-backend-ram\",\"id\":\"pc.ram\",\"size\":1073741824}" -overcommit mem-lock=off -smp 1,sockets=1,dies=1,clusters=1,cores=1,threads=1 -uuid 69c40217-ae22-4704-ad01-f2ca06c42d58 -smbios "type=1,manufacturer=RDO,product=OpenStack Compute,version=27.5.2-0.20260127144738.eaa65f0.el9,serial=69c40217-ae22-4704-ad01-f2ca06c42d58,uuid=69c40217-ae22-4704-ad01-f2ca06c42d58,family=Virtual Machine" -no-user-config -nodefaults -chardev socket,id=charmonitor,fd=33,server=on,wait=off -mon chardev=charmonitor,id=monitor,mode=control -rtc base=utc,driftfix=slew -global kvm-pit.lost_tick_policy=delay -no-shutdown -boot strict=on -device "{\"driver\":\"pcie-root-port\",\"port\":16,\"chassis\":1,\"id\":\"pci.1\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":17,\"chassis\":2,\"id\":\"pci.2\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":18,\"chassis\":3,\"id\":\"pci.3\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":19,\"chassis\":4,\"id\":\"pci.4\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":20,\"chassis\":5,\"id\":\"pci.5\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x4\"}" -device 
"{\"driver\":\"pcie-root-port\",\"port\":21,\"chassis\":6,\"id\":\"pci.6\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":22,\"chassis\":7,\"id\":\"pci.7\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":23,\"chassis\":8,\"id\":\"pci.8\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":24,\"chassis\":9,\"id\":\"pci.9\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":25,\"chassis\":10,\"id\":\"pci.10\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":26,\"chassis\":11,\"id\":\"pci.11\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":27,\"chassis\":12,\"id\":\"pci.12\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":28,\"chassis\":13,\"id\":\"pci.13\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":29,\"chassis\":14,\"id\":\"pci.14\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":30,\"chassis\":15,\"id\":\"pci.15\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":31,\"chassis\":16,\"id\":\"pci.16\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":32,\"chassis\":17,\"id\":\"pci.17\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":33,\"chassis\":18,\"id\":\"pci.18\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":34,\"chassis\":19,\"id\":\"pci.19\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":35,\"chassis\":20,\"id\":\"pci.20\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x3\"}" -device 
"{\"driver\":\"pcie-root-port\",\"port\":36,\"chassis\":21,\"id\":\"pci.21\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":37,\"chassis\":22,\"id\":\"pci.22\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":38,\"chassis\":23,\"id\":\"pci.23\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":39,\"chassis\":24,\"id\":\"pci.24\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":40,\"chassis\":25,\"id\":\"pci.25\",\"bus\":\"pcie.0\",\"addr\":\"0x5\"}" -device "{\"driver\":\"pcie-pci-bridge\",\"id\":\"pci.26\",\"bus\":\"pci.1\",\"addr\":\"0x0\"}" -device "{\"driver\":\"piix3-usb-uhci\",\"id\":\"usb\",\"bus\":\"pci.26\",\"addr\":\"0x1\"}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/_base/bbb3d8d61ef6b68186f44149e3aba39e4a3bf32e\",\"node-name\":\"libvirt-3-storage\",\"read-only\":true,\"cache\":{\"direct\":true,\"no-flush\":false}}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/69c40217-ae22-4704-ad01-f2ca06c42d58/disk\",\"node-name\":\"libvirt-2-storage\",\"auto-read-only\":true,\"discard\":\"unmap\",\"cache\":{\"direct\":true,\"no-flush\":false}}" -blockdev "{\"node-name\":\"libvirt-2-format\",\"read-only\":false,\"cache\":{\"direct\":true,\"no-flush\":false},\"driver\":\"qcow2\",\"file\":\"libvirt-2-storage\",\"backing\":\"libvirt-3-storage\"}" -device "{\"driver\":\"virtio-blk-pci\",\"bus\":\"pci.3\",\"addr\":\"0x0\",\"drive\":\"libvirt-2-format\",\"id\":\"virtio-disk0\",\"bootindex\":1,\"write-cache\":\"on\"}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/69c40217-ae22-4704-ad01-f2ca06c42d58/disk.config\",\"node-name\":\"libvirt-1-storage\",\"read-only\":true,\"cache\":{\"direct\":true,\"no-flush\":false}}" -device 
"{\"driver\":\"ide-cd\",\"bus\":\"ide.0\",\"drive\":\"libvirt-1-storage\",\"id\":\"sata0-0-0\",\"write-cache\":\"on\"}" -netdev "{\"type\":\"tap\",\"fd\":\"39\",\"vhost\":true,\"vhostfd\":\"41\",\"id\":\"hostnet0\"}" -device "{\"driver\":\"virtio-net-pci\",\"rx_queue_size\":512,\"host_mtu\":1342,\"netdev\":\"hostnet0\",\"id\":\"net0\",\"mac\":\"fa:16:3e:d1:43:c6\",\"bus\":\"pci.2\",\"addr\":\"0x0\"}" -add-fd set=0,fd=37,opaque=serial0-log -chardev pty,id=charserial0,logfile=/dev/fdset/0,logappend=on -device "{\"driver\":\"isa-serial\",\"chardev\":\"charserial0\",\"id\":\"serial0\",\"index\":0}" -device "{\"driver\":\"usb-tablet\",\"id\":\"input0\",\"bus\":\"usb.0\",\"port\":\"1\"}" -audiodev "{\"id\":\"audio1\",\"driver\":\"none\"}" -object "{\"qom-type\":\"tls-creds-x509\",\"id\":\"vnc-tls-creds0\",\"dir\":\"/etc/pki/qemu\",\"endpoint\":\"server\",\"verify-peer\":true}" -vnc "[::0]:2,tls-creds=vnc-tls-creds0,audiodev=audio1" -device "{\"driver\":\"virtio-vga\",\"id\":\"video0\",\"max_outputs\":1,\"bus\":\"pcie.0\",\"addr\":\"0x1\"}" -global ICH9-LPC.noreboot=off -watchdog-action reset -device "{\"driver\":\"virtio-balloon-pci\",\"id\":\"balloon0\",\"bus\":\"pci.4\",\"addr\":\"0x0\"}" -object "{\"qom-type\":\"rng-random\",\"id\":\"objrng0\",\"filename\":\"/dev/urandom\"}" -device "{\"driver\":\"virtio-rng-pci\",\"rng\":\"objrng0\",\"id\":\"rng0\",\"bus\":\"pci.5\",\"addr\":\"0x0\"}" -device "{\"driver\":\"vmcoreinfo\"}" -sandbox on,obsolete=deny,elevateprivileges=deny,spawn=deny,resourcecontrol=deny -msg timestamp=on
             │ └─machine-qemu\x2d9\x2dinstance\x2d0000000d.scope
             │   └─libvirt
             │     └─216871 /usr/libexec/qemu-kvm -name guest=instance-0000000d,debug-threads=on -S -object "{\"qom-type\":\"secret\",\"id\":\"masterKey0\",\"format\":\"raw\",\"file\":\"/var/lib/libvirt/qemu/domain-9-instance-0000000d/master-key.aes\"}" -machine pc-q35-rhel9.8.0,usb=off,dump-guest-core=off,memory-backend=pc.ram,hpet=off,acpi=on -accel kvm -cpu EPYC-Rome,x2apic=on,tsc-deadline=on,hypervisor=on,tsc-adjust=on,spec-ctrl=on,stibp=on,ssbd=on,cmp-legacy=on,overflow-recov=on,succor=on,ibrs=on,amd-ssbd=on,virt-ssbd=on,lbrv=on,tsc-scale=on,vmcb-clean=on,flushbyasid=on,pause-filter=on,pfthreshold=on,svme-addr-chk=on,lfence-always-serializing=on,xsaves=off -m size=1048576k -object "{\"qom-type\":\"memory-backend-ram\",\"id\":\"pc.ram\",\"size\":1073741824}" -overcommit mem-lock=off -smp 1,sockets=1,dies=1,clusters=1,cores=1,threads=1 -uuid 639167ea-9de1-4930-b106-5a6f4a1a260d -smbios "type=1,manufacturer=RDO,product=OpenStack Compute,version=27.5.2-0.20260127144738.eaa65f0.el9,serial=639167ea-9de1-4930-b106-5a6f4a1a260d,uuid=639167ea-9de1-4930-b106-5a6f4a1a260d,family=Virtual Machine" -no-user-config -nodefaults -chardev socket,id=charmonitor,fd=26,server=on,wait=off -mon chardev=charmonitor,id=monitor,mode=control -rtc base=utc,driftfix=slew -global kvm-pit.lost_tick_policy=delay -no-shutdown -boot strict=on -device "{\"driver\":\"pcie-root-port\",\"port\":16,\"chassis\":1,\"id\":\"pci.1\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":17,\"chassis\":2,\"id\":\"pci.2\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":18,\"chassis\":3,\"id\":\"pci.3\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":19,\"chassis\":4,\"id\":\"pci.4\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":20,\"chassis\":5,\"id\":\"pci.5\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x4\"}" -device 
"{\"driver\":\"pcie-root-port\",\"port\":21,\"chassis\":6,\"id\":\"pci.6\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":22,\"chassis\":7,\"id\":\"pci.7\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":23,\"chassis\":8,\"id\":\"pci.8\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":24,\"chassis\":9,\"id\":\"pci.9\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":25,\"chassis\":10,\"id\":\"pci.10\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":26,\"chassis\":11,\"id\":\"pci.11\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":27,\"chassis\":12,\"id\":\"pci.12\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":28,\"chassis\":13,\"id\":\"pci.13\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":29,\"chassis\":14,\"id\":\"pci.14\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":30,\"chassis\":15,\"id\":\"pci.15\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":31,\"chassis\":16,\"id\":\"pci.16\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":32,\"chassis\":17,\"id\":\"pci.17\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":33,\"chassis\":18,\"id\":\"pci.18\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":34,\"chassis\":19,\"id\":\"pci.19\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":35,\"chassis\":20,\"id\":\"pci.20\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x3\"}" -device 
"{\"driver\":\"pcie-root-port\",\"port\":36,\"chassis\":21,\"id\":\"pci.21\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":37,\"chassis\":22,\"id\":\"pci.22\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":38,\"chassis\":23,\"id\":\"pci.23\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":39,\"chassis\":24,\"id\":\"pci.24\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":40,\"chassis\":25,\"id\":\"pci.25\",\"bus\":\"pcie.0\",\"addr\":\"0x5\"}" -device "{\"driver\":\"pcie-pci-bridge\",\"id\":\"pci.26\",\"bus\":\"pci.1\",\"addr\":\"0x0\"}" -device "{\"driver\":\"piix3-usb-uhci\",\"id\":\"usb\",\"bus\":\"pci.26\",\"addr\":\"0x1\"}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/_base/bbb3d8d61ef6b68186f44149e3aba39e4a3bf32e\",\"node-name\":\"libvirt-3-storage\",\"read-only\":true,\"cache\":{\"direct\":true,\"no-flush\":false}}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/639167ea-9de1-4930-b106-5a6f4a1a260d/disk\",\"node-name\":\"libvirt-2-storage\",\"auto-read-only\":true,\"discard\":\"unmap\",\"cache\":{\"direct\":true,\"no-flush\":false}}" -blockdev "{\"node-name\":\"libvirt-2-format\",\"read-only\":false,\"cache\":{\"direct\":true,\"no-flush\":false},\"driver\":\"qcow2\",\"file\":\"libvirt-2-storage\",\"backing\":\"libvirt-3-storage\"}" -device "{\"driver\":\"virtio-blk-pci\",\"bus\":\"pci.3\",\"addr\":\"0x0\",\"drive\":\"libvirt-2-format\",\"id\":\"virtio-disk0\",\"bootindex\":1,\"write-cache\":\"on\"}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/639167ea-9de1-4930-b106-5a6f4a1a260d/disk.config\",\"node-name\":\"libvirt-1-storage\",\"read-only\":true,\"cache\":{\"direct\":true,\"no-flush\":false}}" -device 
"{\"driver\":\"ide-cd\",\"bus\":\"ide.0\",\"drive\":\"libvirt-1-storage\",\"id\":\"sata0-0-0\",\"write-cache\":\"on\"}" -netdev "{\"type\":\"tap\",\"fd\":\"31\",\"vhost\":true,\"vhostfd\":\"36\",\"id\":\"hostnet0\"}" -device "{\"driver\":\"virtio-net-pci\",\"rx_queue_size\":512,\"host_mtu\":1342,\"netdev\":\"hostnet0\",\"id\":\"net0\",\"mac\":\"fa:16:3e:72:3c:9d\",\"bus\":\"pci.2\",\"addr\":\"0x0\"}" -add-fd set=0,fd=29,opaque=serial0-log -chardev pty,id=charserial0,logfile=/dev/fdset/0,logappend=on -device "{\"driver\":\"isa-serial\",\"chardev\":\"charserial0\",\"id\":\"serial0\",\"index\":0}" -device "{\"driver\":\"usb-tablet\",\"id\":\"input0\",\"bus\":\"usb.0\",\"port\":\"1\"}" -audiodev "{\"id\":\"audio1\",\"driver\":\"none\"}" -object "{\"qom-type\":\"tls-creds-x509\",\"id\":\"vnc-tls-creds0\",\"dir\":\"/etc/pki/qemu\",\"endpoint\":\"server\",\"verify-peer\":true}" -vnc "[::0]:1,tls-creds=vnc-tls-creds0,audiodev=audio1" -device "{\"driver\":\"virtio-vga\",\"id\":\"video0\",\"max_outputs\":1,\"bus\":\"pcie.0\",\"addr\":\"0x1\"}" -global ICH9-LPC.noreboot=off -watchdog-action reset -device "{\"driver\":\"virtio-balloon-pci\",\"id\":\"balloon0\",\"bus\":\"pci.4\",\"addr\":\"0x0\"}" -object "{\"qom-type\":\"rng-random\",\"id\":\"objrng0\",\"filename\":\"/dev/urandom\"}" -device "{\"driver\":\"virtio-rng-pci\",\"rng\":\"objrng0\",\"id\":\"rng0\",\"bus\":\"pci.5\",\"addr\":\"0x0\"}" -device "{\"driver\":\"vmcoreinfo\"}" -sandbox on,obsolete=deny,elevateprivileges=deny,spawn=deny,resourcecontrol=deny -msg timestamp=on
             ├─system.slice
             │ ├─NetworkManager.service
             │ │ └─55516 /usr/sbin/NetworkManager --no-daemon
             │ ├─auditd.service
             │ │ ├─703 /sbin/auditd
             │ │ └─705 /usr/sbin/sedispatch
             │ ├─chronyd.service
             │ │ └─64957 /usr/sbin/chronyd -F 2
             │ ├─crond.service
             │ │ └─1008 /usr/sbin/crond -n
             │ ├─dbus-broker.service
             │ │ ├─745 /usr/bin/dbus-broker-launch --scope system --audit
             │ │ └─774 dbus-broker --log 4 --controller 9 --machine-id bf0bc0bb03de29b24cba1cc9599cf5d0 --max-bytes 536870912 --max-fds 4096 --max-matches 131072 --audit
             │ ├─edpm_ceilometer_agent_compute.service
             │ │ └─192799 /usr/bin/conmon --api-version 1 -c 60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5 -u 60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5/userdata -p /run/containers/storage/overlay-containers/60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5/userdata/pidfile -n ceilometer_agent_compute --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5 --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5/userdata/oci-log --conmon-pidfile /run/ceilometer_agent_compute.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup 
--exit-command-arg --stopped-only --exit-command-arg 60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5
             │ ├─edpm_node_exporter.service
             │ │ └─195796 /usr/bin/conmon --api-version 1 -c e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0 -u e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0/userdata -p /run/containers/storage/overlay-containers/e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0/userdata/pidfile -n node_exporter --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0 --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0/userdata/oci-log --conmon-pidfile /run/node_exporter.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 
--stopped-only --exit-command-arg e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0
             │ ├─edpm_nova_compute.service
             │ │ └─183115 /usr/bin/conmon --api-version 1 -c 925ff7e3b4039e4c5d83bde5f3c68c16c1a695e578352baba5cb792427c3386e -u 925ff7e3b4039e4c5d83bde5f3c68c16c1a695e578352baba5cb792427c3386e -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/925ff7e3b4039e4c5d83bde5f3c68c16c1a695e578352baba5cb792427c3386e/userdata -p /run/containers/storage/overlay-containers/925ff7e3b4039e4c5d83bde5f3c68c16c1a695e578352baba5cb792427c3386e/userdata/pidfile -n nova_compute --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/925ff7e3b4039e4c5d83bde5f3c68c16c1a695e578352baba5cb792427c3386e --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/925ff7e3b4039e4c5d83bde5f3c68c16c1a695e578352baba5cb792427c3386e/userdata/oci-log --conmon-pidfile /run/nova_compute.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 
--stopped-only --exit-command-arg 925ff7e3b4039e4c5d83bde5f3c68c16c1a695e578352baba5cb792427c3386e
             │ ├─edpm_openstack_network_exporter.service
             │ │ └─202048 /usr/bin/conmon --api-version 1 -c 8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a -u 8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a/userdata -p /run/containers/storage/overlay-containers/8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a/userdata/pidfile -n openstack_network_exporter --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a/userdata/oci-log --conmon-pidfile /run/openstack_network_exporter.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg 
cleanup --exit-command-arg --stopped-only --exit-command-arg 8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a
             │ ├─edpm_ovn_controller.service
             │ │ └─95416 /usr/bin/conmon --api-version 1 -c 60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97 -u 60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97/userdata -p /run/containers/storage/overlay-containers/60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97/userdata/pidfile -n ovn_controller --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97 --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97/userdata/oci-log --conmon-pidfile /run/ovn_controller.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 
--stopped-only --exit-command-arg 60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97
             │ ├─edpm_ovn_metadata_agent.service
             │ │ └─104653 /usr/bin/conmon --api-version 1 -c 573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b -u 573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b/userdata -p /run/containers/storage/overlay-containers/573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b/userdata/pidfile -n ovn_metadata_agent --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b/userdata/oci-log --conmon-pidfile /run/ovn_metadata_agent.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup 
--exit-command-arg --stopped-only --exit-command-arg 573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b
             │ ├─edpm_podman_exporter.service
             │ │ └─198925 /usr/bin/conmon --api-version 1 -c ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735 -u ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735/userdata -p /run/containers/storage/overlay-containers/ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735/userdata/pidfile -n podman_exporter --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735 --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735/userdata/oci-log --conmon-pidfile /run/podman_exporter.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 
--stopped-only --exit-command-arg ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735
             │ ├─gssproxy.service
             │ │ └─873 /usr/sbin/gssproxy -D
             │ ├─irqbalance.service
             │ │ └─781 /usr/sbin/irqbalance
             │ ├─iscsid.service
             │ │ └─168784 /usr/sbin/iscsid -f
             │ ├─multipathd.service
             │ │ └─168942 /sbin/multipathd -d -s
             │ ├─ovs-vswitchd.service
             │ │ └─53815 ovs-vswitchd unix:/var/run/openvswitch/db.sock -vconsole:emer -vsyslog:err -vfile:info --mlockall --user openvswitch:hugetlbfs --no-chdir --log-file=/var/log/openvswitch/ovs-vswitchd.log --pidfile=/var/run/openvswitch/ovs-vswitchd.pid --detach
             │ ├─ovsdb-server.service
             │ │ └─53733 ovsdb-server /etc/openvswitch/conf.db -vconsole:emer -vsyslog:err -vfile:info --remote=punix:/var/run/openvswitch/db.sock --private-key=db:Open_vSwitch,SSL,private_key --certificate=db:Open_vSwitch,SSL,certificate --bootstrap-ca-cert=db:Open_vSwitch,SSL,ca_cert --user openvswitch:hugetlbfs --no-chdir --log-file=/var/log/openvswitch/ovsdb-server.log --pidfile=/var/run/openvswitch/ovsdb-server.pid --detach
             │ ├─podman.service
             │ │ └─198936 /usr/bin/podman --log-level=info system service
             │ ├─polkit.service
             │ │ └─43692 /usr/lib/polkit-1/polkitd --no-debug
             │ ├─rpcbind.service
             │ │ └─701 /usr/bin/rpcbind -w -f
             │ ├─rsyslog.service
             │ │ └─1004 /usr/sbin/rsyslogd -n
             │ ├─sshd.service
             │ │ └─129563 "sshd: /usr/sbin/sshd -D [listener] 0 of 10-100 startups"
             │ ├─system-getty.slice
             │ │ └─getty@tty1.service
             │ │   └─1010 /sbin/agetty -o "-p -- \\u" --noclear - linux
             │ ├─system-serial\x2dgetty.slice
             │ │ └─serial-getty@ttyS0.service
             │ │   └─1011 /sbin/agetty -o "-p -- \\u" --keep-baud 115200,57600,38400,9600 - vt220
             │ ├─systemd-hostnamed.service
             │ │ └─265104 /usr/lib/systemd/systemd-hostnamed
             │ ├─systemd-journald.service
             │ │ └─679 /usr/lib/systemd/systemd-journald
             │ ├─systemd-logind.service
             │ │ └─791 /usr/lib/systemd/systemd-logind
             │ ├─systemd-machined.service
             │ │ └─154436 /usr/lib/systemd/systemd-machined
             │ ├─systemd-udevd.service
             │ │ └─udev
             │ │   └─731 /usr/lib/systemd/systemd-udevd
             │ ├─tuned.service
             │ │ └─43869 /usr/bin/python3 -Es /usr/sbin/tuned -l -P
             │ ├─virtlogd.service
             │ │ └─153807 /usr/sbin/virtlogd
             │ ├─virtnodedevd.service
             │ │ └─183033 /usr/sbin/virtnodedevd --timeout 120
             │ └─virtqemud.service
             │   └─182740 /usr/sbin/virtqemud --timeout 120
             └─user.slice
               └─user-1000.slice
                 ├─session-1.scope
                 │ └─4517 /usr/bin/python3
                 ├─session-100.scope
                 │ ├─245026 "sshd-session: zuul [priv]"
                 │ └─245029 "sshd-session: zuul@notty"
                 ├─session-102.scope
                 │ ├─245122 "sshd-session: zuul [priv]"
                 │ └─245125 "sshd-session: zuul@notty"
                 ├─session-103.scope
                 │ ├─245415 "sshd-session: zuul [priv]"
                 │ └─245418 "sshd-session: zuul@notty"
                 ├─session-105.scope
                 │ ├─245517 "sshd-session: zuul [priv]"
                 │ └─245520 "sshd-session: zuul@notty"
                 ├─session-108.scope
                 │ ├─245810 "sshd-session: zuul [priv]"
                 │ └─245829 "sshd-session: zuul@notty"
                 ├─session-110.scope
                 │ ├─245913 "sshd-session: zuul [priv]"
                 │ └─245929 "sshd-session: zuul@notty"
                 ├─session-111.scope
                 │ ├─245989 "sshd-session: zuul [priv]"
                 │ └─245992 "sshd-session: zuul@notty"
                 ├─session-113.scope
                 │ ├─246092 "sshd-session: zuul [priv]"
                 │ └─246095 "sshd-session: zuul@notty"
                 ├─session-120.scope
                 │ ├─246976 "sshd-session: zuul [priv]"
                 │ └─246979 "sshd-session: zuul@notty"
                 ├─session-122.scope
                 │ ├─247166 "sshd-session: zuul [priv]"
                 │ └─247169 "sshd-session: zuul@notty"
                 ├─session-124.scope
                 │ ├─247625 "sshd-session: zuul [priv]"
                 │ └─247628 "sshd-session: zuul@notty"
                 ├─session-126.scope
                 │ ├─247788 "sshd-session: zuul [priv]"
                 │ └─247791 "sshd-session: zuul@notty"
                 ├─session-128.scope
                 │ ├─248316 "sshd-session: zuul [priv]"
                 │ └─248319 "sshd-session: zuul@notty"
                 ├─session-130.scope
                 │ ├─248418 "sshd-session: zuul [priv]"
                 │ └─248421 "sshd-session: zuul@notty"
                 ├─session-131.scope
                 │ ├─248494 "sshd-session: zuul [priv]"
                 │ └─248497 "sshd-session: zuul@notty"
                 ├─session-133.scope
                 │ ├─248553 "sshd-session: zuul [priv]"
                 │ └─248568 "sshd-session: zuul@notty"
                 ├─session-134.scope
                 │ ├─248856 "sshd-session: zuul [priv]"
                 │ └─248883 "sshd-session: zuul@notty"
                 ├─session-136.scope
                 │ ├─249050 "sshd-session: zuul [priv]"
                 │ └─249053 "sshd-session: zuul@notty"
                 ├─session-158.scope
                 │ ├─264277 "sshd-session: zuul [priv]"
                 │ ├─264280 "sshd-session: zuul@notty"
                 │ ├─264281 sudo bash -c "rm -rf /var/tmp/sos-osp && mkdir /var/tmp/sos-osp && sos report --batch --all-logs --tmp-dir=/var/tmp/sos-osp  -p container,openstack_edpm,system,storage,virt"
                 │ ├─264314 /usr/bin/python3 -s /sbin/sos report --batch --all-logs --tmp-dir=/var/tmp/sos-osp -p container,openstack_edpm,system,storage,virt
                 │ ├─267249 timeout 15s turbostat --debug sleep 10
                 │ ├─267669 timeout 300s systemctl status --all
                 │ ├─267670 systemctl status --all
                 │ ├─267750 timeout 300s semanage boolean -l
                 │ ├─267751 timeout --foreground 300s virsh -r nodedev-dumpxml net_lo_00_00_00_00_00_00
                 │ ├─267752 /usr/bin/python3 -EsI /usr/sbin/semanage boolean -l
                 │ └─267753 virsh -r nodedev-dumpxml net_lo_00_00_00_00_00_00
                 ├─session-48.scope
                 │ ├─240767 "sshd-session: zuul [priv]"
                 │ └─240770 "sshd-session: zuul@notty"
                 ├─session-50.scope
                 │ ├─240930 "sshd-session: zuul [priv]"
                 │ └─240933 "sshd-session: zuul@notty"
                 ├─session-51.scope
                 │ ├─241003 "sshd-session: zuul [priv]"
                 │ └─241006 "sshd-session: zuul@notty"
                 ├─session-53.scope
                 │ ├─241063 "sshd-session: zuul [priv]"
                 │ └─241091 "sshd-session: zuul@notty"
                 ├─session-54.scope
                 │ ├─241146 "sshd-session: zuul [priv]"
                 │ └─241149 "sshd-session: zuul@notty"
                 ├─session-56.scope
                 │ ├─241295 "sshd-session: zuul [priv]"
                 │ └─241298 "sshd-session: zuul@notty"
                 ├─session-57.scope
                 │ ├─241344 "sshd-session: zuul [priv]"
                 │ └─241347 "sshd-session: zuul@notty"
                 ├─session-59.scope
                 │ ├─241444 "sshd-session: zuul [priv]"
                 │ └─241447 "sshd-session: zuul@notty"
                 ├─session-60.scope
                 │ ├─241519 "sshd-session: zuul [priv]"
                 │ └─241522 "sshd-session: zuul@notty"
                 ├─session-62.scope
                 │ ├─241662 "sshd-session: zuul [priv]"
                 │ └─241665 "sshd-session: zuul@notty"
                 ├─session-68.scope
                 │ ├─242466 "sshd-session: zuul [priv]"
                 │ └─242469 "sshd-session: zuul@notty"
                 ├─session-70.scope
                 │ ├─242564 "sshd-session: zuul [priv]"
                 │ └─242586 "sshd-session: zuul@notty"
                 ├─session-71.scope
                 │ ├─242638 "sshd-session: zuul [priv]"
                 │ └─242641 "sshd-session: zuul@notty"
                 ├─session-73.scope
                 │ ├─242742 "sshd-session: zuul [priv]"
                 │ └─242745 "sshd-session: zuul@notty"
                 ├─session-74.scope
                 │ ├─242772 "sshd-session: zuul [priv]"
                 │ └─242775 "sshd-session: zuul@notty"
                 ├─session-76.scope
                 │ ├─242832 "sshd-session: zuul [priv]"
                 │ └─242835 "sshd-session: zuul@notty"
                 ├─session-79.scope
                 │ ├─243425 "sshd-session: zuul [priv]"
                 │ └─243428 "sshd-session: zuul@notty"
                 ├─session-81.scope
                 │ ├─243524 "sshd-session: zuul [priv]"
                 │ └─243527 "sshd-session: zuul@notty"
                 ├─session-82.scope
                 │ ├─243554 "sshd-session: zuul [priv]"
                 │ └─243557 "sshd-session: zuul@notty"
                 ├─session-84.scope
                 │ ├─243613 "sshd-session: zuul [priv]"
                 │ └─243616 "sshd-session: zuul@notty"
                 ├─session-85.scope
                 │ ├─243643 "sshd-session: zuul [priv]"
                 │ └─243668 "sshd-session: zuul@notty"
                 ├─session-87.scope
                 │ ├─243790 "sshd-session: zuul [priv]"
                 │ └─243793 "sshd-session: zuul@notty"
                 ├─session-94.scope
                 │ ├─244430 "sshd-session: zuul [priv]"
                 │ └─244433 "sshd-session: zuul@notty"
                 ├─session-96.scope
                 │ ├─244575 "sshd-session: zuul [priv]"
                 │ └─244578 "sshd-session: zuul@notty"
                 ├─session-97.scope
                 │ ├─244785 "sshd-session: zuul [priv]"
                 │ └─244807 "sshd-session: zuul@notty"
                 ├─session-99.scope
                 │ ├─244996 "sshd-session: zuul [priv]"
                 │ └─244999 "sshd-session: zuul@notty"
                 └─user@1000.service
                   ├─app.slice
                   │ └─dbus-broker.service
                   │   ├─15953 /usr/bin/dbus-broker-launch --scope user
                   │   └─15968 dbus-broker --log 4 --controller 9 --machine-id bf0bc0bb03de29b24cba1cc9599cf5d0 --max-bytes 100000000000000 --max-fds 25000000000000 --max-matches 5000000000
                   ├─init.scope
                   │ ├─4307 /usr/lib/systemd/systemd --user
                   │ └─4309 "(sd-pam)"
                   └─user.slice
                     └─podman-pause-bff822aa.scope
                       └─15896 catatonit -P

Jan 30 20:20:56 compute-0 systemd[1]: Started Session 158 of User zuul.
Jan 30 20:21:01 compute-0 systemd[1]: proc-sys-fs-binfmt_misc.automount: Got automount request for /proc/sys/fs/binfmt_misc, triggered by 264314 (sos)
Jan 30 20:21:01 compute-0 systemd[1]: Mounting Arbitrary Executable File Formats File System...
Jan 30 20:21:01 compute-0 systemd[1]: Mounted Arbitrary Executable File Formats File System.
Jan 30 20:21:03 compute-0 systemd[1]: Started Session 159 of User zuul.
Jan 30 20:21:04 compute-0 systemd[1]: Started Session 160 of User zuul.
Jan 30 20:21:04 compute-0 systemd[1]: session-159.scope: Deactivated successfully.
Jan 30 20:21:04 compute-0 systemd[1]: session-160.scope: Deactivated successfully.
Jan 30 20:21:04 compute-0 systemd[1]: Starting Hostname Service...
Jan 30 20:21:04 compute-0 systemd[1]: Started Hostname Service.

● machine.slice - Virtual Machine and Container Slice
     Loaded: loaded (/usr/lib/systemd/system/machine.slice; static)
     Active: active since Fri 2026-01-30 17:34:05 UTC; 2h 47min ago
      Until: Fri 2026-01-30 17:34:05 UTC; 2h 47min ago
       Docs: man:systemd.special(7)
         IO: 141.1G read, 4.5G written
      Tasks: 112
     Memory: 3.2G (peak: 4.7G)
        CPU: 7h 3min 35.250s
     CGroup: /machine.slice
             ├─libpod-573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b.scope
             │ └─container
             │   ├─104655 dumb-init --single-child -- kolla_start
             │   ├─104658 "neutron-ovn-metadata-agent (/usr/bin/python3 /usr/bin/neutron-ovn-metadata-agent)"
             │   ├─104880 "neutron-ovn-metadata-agent (/usr/bin/python3 /usr/bin/neutron-ovn-metadata-agent)"
             │   ├─104920 /usr/bin/python3 /bin/privsep-helper --config-file /etc/neutron/neutron.conf --config-dir /etc/neutron.conf.d --privsep_context neutron.privileged.namespace_cmd --privsep_sock_path /tmp/tmpp73u3fin/privsep.sock
             │   ├─212547 /usr/bin/python3 /bin/privsep-helper --config-file /etc/neutron/neutron.conf --config-dir /etc/neutron.conf.d --privsep_context neutron.privileged.default --privsep_sock_path /tmp/tmpr5q8om25/privsep.sock
             │   └─212743 /usr/bin/python3 /bin/privsep-helper --config-file /etc/neutron/neutron.conf --config-dir /etc/neutron.conf.d --privsep_context neutron.privileged.link_cmd --privsep_sock_path /tmp/tmp3la6aon7/privsep.sock
             ├─libpod-60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5.scope
             │ └─container
             │   ├─192801 dumb-init --single-child -- kolla_start
             │   ├─192804 "ceilometer-polling: master process [/usr/bin/ceilometer-polling --polling-namespaces compute --logfile /dev/stdout]"
             │   └─192931 "ceilometer-polling: AgentManager worker(0)"
             ├─libpod-60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97.scope
             │ └─container
             │   ├─95418 dumb-init --single-child -- kolla_start
             │   └─95421 /usr/bin/ovn-controller --pidfile unix:/run/openvswitch/db.sock -p /etc/pki/tls/private/ovndb.key -c /etc/pki/tls/certs/ovndb.crt -C /etc/pki/tls/certs/ovndbca.crt
             ├─libpod-8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a.scope
             │ └─container
             │   └─202050 /app/openstack-network-exporter
             ├─libpod-925ff7e3b4039e4c5d83bde5f3c68c16c1a695e578352baba5cb792427c3386e.scope
             │ └─container
             │   ├─183117 dumb-init --single-child -- kolla_start
             │   ├─183119 /usr/bin/python3 /usr/bin/nova-compute
             │   ├─212621 /usr/bin/python3 /bin/privsep-helper --config-file /etc/nova/nova.conf --config-file /etc/nova/nova-compute.conf --config-dir /etc/nova/nova.conf.d --privsep_context nova.privsep.sys_admin_pctxt --privsep_sock_path /tmp/tmp5s1ioym_/privsep.sock
             │   └─212642 /usr/bin/python3 /bin/privsep-helper --config-file /etc/nova/nova.conf --config-file /etc/nova/nova-compute.conf --config-dir /etc/nova/nova.conf.d --privsep_context vif_plug_ovs.privsep.vif_plug --privsep_sock_path /tmp/tmpmo51pq8n/privsep.sock
             ├─libpod-ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735.scope
             │ └─container
             │   └─198927 /bin/podman_exporter --web.config.file=/etc/podman_exporter/podman_exporter.yaml
             ├─libpod-e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0.scope
             │ └─container
             │   └─195798 /bin/node_exporter --web.config.file=/etc/node_exporter/node_exporter.yaml --web.disable-exporter-metrics --collector.systemd "--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service" --no-collector.dmi --no-collector.entropy --no-collector.thermal_zone --no-collector.time --no-collector.timex --no-collector.uname --no-collector.stat --no-collector.hwmon --no-collector.os --no-collector.selinux --no-collector.textfile --no-collector.powersupplyclass --no-collector.pressure --no-collector.rapl
             ├─machine-qemu\x2d10\x2dinstance\x2d0000000f.scope
             │ └─libvirt
             │   └─216930 /usr/libexec/qemu-kvm -name guest=instance-0000000f,debug-threads=on -S -object "{\"qom-type\":\"secret\",\"id\":\"masterKey0\",\"format\":\"raw\",\"file\":\"/var/lib/libvirt/qemu/domain-10-instance-0000000f/master-key.aes\"}" -machine pc-q35-rhel9.8.0,usb=off,dump-guest-core=off,memory-backend=pc.ram,hpet=off,acpi=on -accel kvm -cpu EPYC-Rome,x2apic=on,tsc-deadline=on,hypervisor=on,tsc-adjust=on,spec-ctrl=on,stibp=on,ssbd=on,cmp-legacy=on,overflow-recov=on,succor=on,ibrs=on,amd-ssbd=on,virt-ssbd=on,lbrv=on,tsc-scale=on,vmcb-clean=on,flushbyasid=on,pause-filter=on,pfthreshold=on,svme-addr-chk=on,lfence-always-serializing=on,xsaves=off -m size=1048576k -object "{\"qom-type\":\"memory-backend-ram\",\"id\":\"pc.ram\",\"size\":1073741824}" -overcommit mem-lock=off -smp 1,sockets=1,dies=1,clusters=1,cores=1,threads=1 -uuid 69c40217-ae22-4704-ad01-f2ca06c42d58 -smbios "type=1,manufacturer=RDO,product=OpenStack Compute,version=27.5.2-0.20260127144738.eaa65f0.el9,serial=69c40217-ae22-4704-ad01-f2ca06c42d58,uuid=69c40217-ae22-4704-ad01-f2ca06c42d58,family=Virtual Machine" -no-user-config -nodefaults -chardev socket,id=charmonitor,fd=33,server=on,wait=off -mon chardev=charmonitor,id=monitor,mode=control -rtc base=utc,driftfix=slew -global kvm-pit.lost_tick_policy=delay -no-shutdown -boot strict=on -device "{\"driver\":\"pcie-root-port\",\"port\":16,\"chassis\":1,\"id\":\"pci.1\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":17,\"chassis\":2,\"id\":\"pci.2\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":18,\"chassis\":3,\"id\":\"pci.3\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":19,\"chassis\":4,\"id\":\"pci.4\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":20,\"chassis\":5,\"id\":\"pci.5\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x4\"}" -device 
"{\"driver\":\"pcie-root-port\",\"port\":21,\"chassis\":6,\"id\":\"pci.6\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":22,\"chassis\":7,\"id\":\"pci.7\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":23,\"chassis\":8,\"id\":\"pci.8\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":24,\"chassis\":9,\"id\":\"pci.9\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":25,\"chassis\":10,\"id\":\"pci.10\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":26,\"chassis\":11,\"id\":\"pci.11\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":27,\"chassis\":12,\"id\":\"pci.12\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":28,\"chassis\":13,\"id\":\"pci.13\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":29,\"chassis\":14,\"id\":\"pci.14\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":30,\"chassis\":15,\"id\":\"pci.15\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":31,\"chassis\":16,\"id\":\"pci.16\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":32,\"chassis\":17,\"id\":\"pci.17\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":33,\"chassis\":18,\"id\":\"pci.18\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":34,\"chassis\":19,\"id\":\"pci.19\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":35,\"chassis\":20,\"id\":\"pci.20\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x3\"}" -device 
"{\"driver\":\"pcie-root-port\",\"port\":36,\"chassis\":21,\"id\":\"pci.21\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":37,\"chassis\":22,\"id\":\"pci.22\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":38,\"chassis\":23,\"id\":\"pci.23\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":39,\"chassis\":24,\"id\":\"pci.24\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":40,\"chassis\":25,\"id\":\"pci.25\",\"bus\":\"pcie.0\",\"addr\":\"0x5\"}" -device "{\"driver\":\"pcie-pci-bridge\",\"id\":\"pci.26\",\"bus\":\"pci.1\",\"addr\":\"0x0\"}" -device "{\"driver\":\"piix3-usb-uhci\",\"id\":\"usb\",\"bus\":\"pci.26\",\"addr\":\"0x1\"}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/_base/bbb3d8d61ef6b68186f44149e3aba39e4a3bf32e\",\"node-name\":\"libvirt-3-storage\",\"read-only\":true,\"cache\":{\"direct\":true,\"no-flush\":false}}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/69c40217-ae22-4704-ad01-f2ca06c42d58/disk\",\"node-name\":\"libvirt-2-storage\",\"auto-read-only\":true,\"discard\":\"unmap\",\"cache\":{\"direct\":true,\"no-flush\":false}}" -blockdev "{\"node-name\":\"libvirt-2-format\",\"read-only\":false,\"cache\":{\"direct\":true,\"no-flush\":false},\"driver\":\"qcow2\",\"file\":\"libvirt-2-storage\",\"backing\":\"libvirt-3-storage\"}" -device "{\"driver\":\"virtio-blk-pci\",\"bus\":\"pci.3\",\"addr\":\"0x0\",\"drive\":\"libvirt-2-format\",\"id\":\"virtio-disk0\",\"bootindex\":1,\"write-cache\":\"on\"}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/69c40217-ae22-4704-ad01-f2ca06c42d58/disk.config\",\"node-name\":\"libvirt-1-storage\",\"read-only\":true,\"cache\":{\"direct\":true,\"no-flush\":false}}" -device 
"{\"driver\":\"ide-cd\",\"bus\":\"ide.0\",\"drive\":\"libvirt-1-storage\",\"id\":\"sata0-0-0\",\"write-cache\":\"on\"}" -netdev "{\"type\":\"tap\",\"fd\":\"39\",\"vhost\":true,\"vhostfd\":\"41\",\"id\":\"hostnet0\"}" -device "{\"driver\":\"virtio-net-pci\",\"rx_queue_size\":512,\"host_mtu\":1342,\"netdev\":\"hostnet0\",\"id\":\"net0\",\"mac\":\"fa:16:3e:d1:43:c6\",\"bus\":\"pci.2\",\"addr\":\"0x0\"}" -add-fd set=0,fd=37,opaque=serial0-log -chardev pty,id=charserial0,logfile=/dev/fdset/0,logappend=on -device "{\"driver\":\"isa-serial\",\"chardev\":\"charserial0\",\"id\":\"serial0\",\"index\":0}" -device "{\"driver\":\"usb-tablet\",\"id\":\"input0\",\"bus\":\"usb.0\",\"port\":\"1\"}" -audiodev "{\"id\":\"audio1\",\"driver\":\"none\"}" -object "{\"qom-type\":\"tls-creds-x509\",\"id\":\"vnc-tls-creds0\",\"dir\":\"/etc/pki/qemu\",\"endpoint\":\"server\",\"verify-peer\":true}" -vnc "[::0]:2,tls-creds=vnc-tls-creds0,audiodev=audio1" -device "{\"driver\":\"virtio-vga\",\"id\":\"video0\",\"max_outputs\":1,\"bus\":\"pcie.0\",\"addr\":\"0x1\"}" -global ICH9-LPC.noreboot=off -watchdog-action reset -device "{\"driver\":\"virtio-balloon-pci\",\"id\":\"balloon0\",\"bus\":\"pci.4\",\"addr\":\"0x0\"}" -object "{\"qom-type\":\"rng-random\",\"id\":\"objrng0\",\"filename\":\"/dev/urandom\"}" -device "{\"driver\":\"virtio-rng-pci\",\"rng\":\"objrng0\",\"id\":\"rng0\",\"bus\":\"pci.5\",\"addr\":\"0x0\"}" -device "{\"driver\":\"vmcoreinfo\"}" -sandbox on,obsolete=deny,elevateprivileges=deny,spawn=deny,resourcecontrol=deny -msg timestamp=on
             └─machine-qemu\x2d9\x2dinstance\x2d0000000d.scope
               └─libvirt
                 └─216871 /usr/libexec/qemu-kvm -name guest=instance-0000000d,debug-threads=on -S -object "{\"qom-type\":\"secret\",\"id\":\"masterKey0\",\"format\":\"raw\",\"file\":\"/var/lib/libvirt/qemu/domain-9-instance-0000000d/master-key.aes\"}" -machine pc-q35-rhel9.8.0,usb=off,dump-guest-core=off,memory-backend=pc.ram,hpet=off,acpi=on -accel kvm -cpu EPYC-Rome,x2apic=on,tsc-deadline=on,hypervisor=on,tsc-adjust=on,spec-ctrl=on,stibp=on,ssbd=on,cmp-legacy=on,overflow-recov=on,succor=on,ibrs=on,amd-ssbd=on,virt-ssbd=on,lbrv=on,tsc-scale=on,vmcb-clean=on,flushbyasid=on,pause-filter=on,pfthreshold=on,svme-addr-chk=on,lfence-always-serializing=on,xsaves=off -m size=1048576k -object "{\"qom-type\":\"memory-backend-ram\",\"id\":\"pc.ram\",\"size\":1073741824}" -overcommit mem-lock=off -smp 1,sockets=1,dies=1,clusters=1,cores=1,threads=1 -uuid 639167ea-9de1-4930-b106-5a6f4a1a260d -smbios "type=1,manufacturer=RDO,product=OpenStack Compute,version=27.5.2-0.20260127144738.eaa65f0.el9,serial=639167ea-9de1-4930-b106-5a6f4a1a260d,uuid=639167ea-9de1-4930-b106-5a6f4a1a260d,family=Virtual Machine" -no-user-config -nodefaults -chardev socket,id=charmonitor,fd=26,server=on,wait=off -mon chardev=charmonitor,id=monitor,mode=control -rtc base=utc,driftfix=slew -global kvm-pit.lost_tick_policy=delay -no-shutdown -boot strict=on -device "{\"driver\":\"pcie-root-port\",\"port\":16,\"chassis\":1,\"id\":\"pci.1\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":17,\"chassis\":2,\"id\":\"pci.2\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":18,\"chassis\":3,\"id\":\"pci.3\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":19,\"chassis\":4,\"id\":\"pci.4\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":20,\"chassis\":5,\"id\":\"pci.5\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x4\"}" -device 
"{\"driver\":\"pcie-root-port\",\"port\":21,\"chassis\":6,\"id\":\"pci.6\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":22,\"chassis\":7,\"id\":\"pci.7\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":23,\"chassis\":8,\"id\":\"pci.8\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":24,\"chassis\":9,\"id\":\"pci.9\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":25,\"chassis\":10,\"id\":\"pci.10\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":26,\"chassis\":11,\"id\":\"pci.11\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":27,\"chassis\":12,\"id\":\"pci.12\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":28,\"chassis\":13,\"id\":\"pci.13\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":29,\"chassis\":14,\"id\":\"pci.14\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":30,\"chassis\":15,\"id\":\"pci.15\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":31,\"chassis\":16,\"id\":\"pci.16\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":32,\"chassis\":17,\"id\":\"pci.17\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":33,\"chassis\":18,\"id\":\"pci.18\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":34,\"chassis\":19,\"id\":\"pci.19\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":35,\"chassis\":20,\"id\":\"pci.20\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x3\"}" -device 
"{\"driver\":\"pcie-root-port\",\"port\":36,\"chassis\":21,\"id\":\"pci.21\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":37,\"chassis\":22,\"id\":\"pci.22\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":38,\"chassis\":23,\"id\":\"pci.23\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":39,\"chassis\":24,\"id\":\"pci.24\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":40,\"chassis\":25,\"id\":\"pci.25\",\"bus\":\"pcie.0\",\"addr\":\"0x5\"}" -device "{\"driver\":\"pcie-pci-bridge\",\"id\":\"pci.26\",\"bus\":\"pci.1\",\"addr\":\"0x0\"}" -device "{\"driver\":\"piix3-usb-uhci\",\"id\":\"usb\",\"bus\":\"pci.26\",\"addr\":\"0x1\"}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/_base/bbb3d8d61ef6b68186f44149e3aba39e4a3bf32e\",\"node-name\":\"libvirt-3-storage\",\"read-only\":true,\"cache\":{\"direct\":true,\"no-flush\":false}}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/639167ea-9de1-4930-b106-5a6f4a1a260d/disk\",\"node-name\":\"libvirt-2-storage\",\"auto-read-only\":true,\"discard\":\"unmap\",\"cache\":{\"direct\":true,\"no-flush\":false}}" -blockdev "{\"node-name\":\"libvirt-2-format\",\"read-only\":false,\"cache\":{\"direct\":true,\"no-flush\":false},\"driver\":\"qcow2\",\"file\":\"libvirt-2-storage\",\"backing\":\"libvirt-3-storage\"}" -device "{\"driver\":\"virtio-blk-pci\",\"bus\":\"pci.3\",\"addr\":\"0x0\",\"drive\":\"libvirt-2-format\",\"id\":\"virtio-disk0\",\"bootindex\":1,\"write-cache\":\"on\"}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/639167ea-9de1-4930-b106-5a6f4a1a260d/disk.config\",\"node-name\":\"libvirt-1-storage\",\"read-only\":true,\"cache\":{\"direct\":true,\"no-flush\":false}}" -device 
"{\"driver\":\"ide-cd\",\"bus\":\"ide.0\",\"drive\":\"libvirt-1-storage\",\"id\":\"sata0-0-0\",\"write-cache\":\"on\"}" -netdev "{\"type\":\"tap\",\"fd\":\"31\",\"vhost\":true,\"vhostfd\":\"36\",\"id\":\"hostnet0\"}" -device "{\"driver\":\"virtio-net-pci\",\"rx_queue_size\":512,\"host_mtu\":1342,\"netdev\":\"hostnet0\",\"id\":\"net0\",\"mac\":\"fa:16:3e:72:3c:9d\",\"bus\":\"pci.2\",\"addr\":\"0x0\"}" -add-fd set=0,fd=29,opaque=serial0-log -chardev pty,id=charserial0,logfile=/dev/fdset/0,logappend=on -device "{\"driver\":\"isa-serial\",\"chardev\":\"charserial0\",\"id\":\"serial0\",\"index\":0}" -device "{\"driver\":\"usb-tablet\",\"id\":\"input0\",\"bus\":\"usb.0\",\"port\":\"1\"}" -audiodev "{\"id\":\"audio1\",\"driver\":\"none\"}" -object "{\"qom-type\":\"tls-creds-x509\",\"id\":\"vnc-tls-creds0\",\"dir\":\"/etc/pki/qemu\",\"endpoint\":\"server\",\"verify-peer\":true}" -vnc "[::0]:1,tls-creds=vnc-tls-creds0,audiodev=audio1" -device "{\"driver\":\"virtio-vga\",\"id\":\"video0\",\"max_outputs\":1,\"bus\":\"pcie.0\",\"addr\":\"0x1\"}" -global ICH9-LPC.noreboot=off -watchdog-action reset -device "{\"driver\":\"virtio-balloon-pci\",\"id\":\"balloon0\",\"bus\":\"pci.4\",\"addr\":\"0x0\"}" -object "{\"qom-type\":\"rng-random\",\"id\":\"objrng0\",\"filename\":\"/dev/urandom\"}" -device "{\"driver\":\"virtio-rng-pci\",\"rng\":\"objrng0\",\"id\":\"rng0\",\"bus\":\"pci.5\",\"addr\":\"0x0\"}" -device "{\"driver\":\"vmcoreinfo\"}" -sandbox on,obsolete=deny,elevateprivileges=deny,spawn=deny,resourcecontrol=deny -msg timestamp=on

Jan 30 20:07:07 compute-0 neutron-haproxy-ovnmeta-68e01440-667d-4947-88bc-8bc355005035[260370]: [NOTICE]   (260398) : Loading success.
Jan 30 20:15:29 compute-0 neutron-haproxy-ovnmeta-68e01440-667d-4947-88bc-8bc355005035[260370]: [NOTICE]   (260398) : haproxy version is 2.8.14-c23fe91
Jan 30 20:15:29 compute-0 neutron-haproxy-ovnmeta-68e01440-667d-4947-88bc-8bc355005035[260370]: [NOTICE]   (260398) : path to executable is /usr/sbin/haproxy
Jan 30 20:15:29 compute-0 neutron-haproxy-ovnmeta-68e01440-667d-4947-88bc-8bc355005035[260370]: [WARNING]  (260398) : Exiting Master process...
Jan 30 20:15:29 compute-0 neutron-haproxy-ovnmeta-68e01440-667d-4947-88bc-8bc355005035[260370]: [WARNING]  (260398) : Exiting Master process...
Jan 30 20:15:29 compute-0 neutron-haproxy-ovnmeta-68e01440-667d-4947-88bc-8bc355005035[260370]: [ALERT]    (260398) : Current worker (260403) exited with code 143 (Terminated)
Jan 30 20:15:29 compute-0 neutron-haproxy-ovnmeta-68e01440-667d-4947-88bc-8bc355005035[260370]: [WARNING]  (260398) : All workers exited. Exiting... (0)
Jan 30 20:15:29 compute-0 podman[262634]: 2026-01-30 20:15:29.957885888 +0000 UTC m=+0.058231360 container died 36586e9dffff858f22e42e79db799608fac647ab6e0fc6c469411c00674cfc27 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-68e01440-667d-4947-88bc-8bc355005035, tcib_managed=true, org.label-schema.build-date=20260127, tcib_build_tag=b85d0548925081ae8c6bdd697658cec4, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS)
Jan 30 20:15:30 compute-0 podman[262634]: 2026-01-30 20:15:30.015390094 +0000 UTC m=+0.115735566 container cleanup 36586e9dffff858f22e42e79db799608fac647ab6e0fc6c469411c00674cfc27 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-68e01440-667d-4947-88bc-8bc355005035, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, tcib_build_tag=b85d0548925081ae8c6bdd697658cec4, org.label-schema.vendor=CentOS, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20260127, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0)
Jan 30 20:15:30 compute-0 podman[262678]: 2026-01-30 20:15:30.077506034 +0000 UTC m=+0.046524504 container remove 36586e9dffff858f22e42e79db799608fac647ab6e0fc6c469411c00674cfc27 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-68e01440-667d-4947-88bc-8bc355005035, org.label-schema.vendor=CentOS, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20260127, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=b85d0548925081ae8c6bdd697658cec4, org.label-schema.license=GPLv2)

● system-dbus\x2d:1.1\x2dorg.fedoraproject.SetroubleshootPrivileged.slice - Slice /system/dbus-:1.1-org.fedoraproject.SetroubleshootPrivileged
     Loaded: loaded
     Active: active since Fri 2026-01-30 17:39:33 UTC; 2h 41min ago
      Until: Fri 2026-01-30 17:39:33 UTC; 2h 41min ago
         IO: 4.0K read, 0B written
      Tasks: 0
     Memory: 8.0K (peak: 58.7M)
        CPU: 947ms
     CGroup: /system.slice/system-dbus\x2d:1.1\x2dorg.fedoraproject.SetroubleshootPrivileged.slice

Jan 30 17:39:33 compute-0 systemd[1]: Created slice Slice /system/dbus-:1.1-org.fedoraproject.SetroubleshootPrivileged.

● system-getty.slice - Slice /system/getty
     Loaded: loaded
     Active: active since Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
         IO: 0B read, 0B written
      Tasks: 1
     Memory: 240.0K (peak: 460.0K)
        CPU: 5ms
     CGroup: /system.slice/system-getty.slice
             └─getty@tty1.service
               └─1010 /sbin/agetty -o "-p -- \\u" --noclear - linux

● system-modprobe.slice - Slice /system/modprobe
     Loaded: loaded
     Active: active since Fri 2026-01-30 16:49:43 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:43 UTC; 3h 31min ago
         IO: 0B read, 0B written
      Tasks: 0
     Memory: 412.0K (peak: 11.1M)
        CPU: 150ms
     CGroup: /system.slice/system-modprobe.slice

Jan 30 16:49:43 localhost systemd[1]: Created slice Slice /system/modprobe.

● system-serial\x2dgetty.slice - Slice /system/serial-getty
     Loaded: loaded
     Active: active since Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
         IO: 0B read, 0B written
      Tasks: 1
     Memory: 320.0K (peak: 564.0K)
        CPU: 7ms
     CGroup: /system.slice/system-serial\x2dgetty.slice
             └─serial-getty@ttyS0.service
               └─1011 /sbin/agetty -o "-p -- \\u" --keep-baud 115200,57600,38400,9600 - vt220

● system-sshd\x2dkeygen.slice - Slice /system/sshd-keygen
     Loaded: loaded
     Active: active since Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
         IO: 0B read, 0B written
      Tasks: 0
     Memory: 0B (peak: 0B)
        CPU: 0
     CGroup: /system.slice/system-sshd\x2dkeygen.slice

● system.slice - System Slice
     Loaded: loaded
     Active: active since Fri 2026-01-30 16:49:42 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:42 UTC; 3h 31min ago
       Docs: man:systemd.special(7)
         IO: 58.5M read, 265.7M written
      Tasks: 133
     Memory: 939.7M (peak: 983.8M)
        CPU: 38min 40.004s
     CGroup: /system.slice
             ├─NetworkManager.service
             │ └─55516 /usr/sbin/NetworkManager --no-daemon
             ├─auditd.service
             │ ├─703 /sbin/auditd
             │ └─705 /usr/sbin/sedispatch
             ├─chronyd.service
             │ └─64957 /usr/sbin/chronyd -F 2
             ├─crond.service
             │ └─1008 /usr/sbin/crond -n
             ├─dbus-broker.service
             │ ├─745 /usr/bin/dbus-broker-launch --scope system --audit
             │ └─774 dbus-broker --log 4 --controller 9 --machine-id bf0bc0bb03de29b24cba1cc9599cf5d0 --max-bytes 536870912 --max-fds 4096 --max-matches 131072 --audit
             ├─edpm_ceilometer_agent_compute.service
             │ └─192799 /usr/bin/conmon --api-version 1 -c 60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5 -u 60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5/userdata -p /run/containers/storage/overlay-containers/60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5/userdata/pidfile -n ceilometer_agent_compute --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5 --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5/userdata/oci-log --conmon-pidfile /run/ceilometer_agent_compute.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup 
--exit-command-arg --stopped-only --exit-command-arg 60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5
             ├─edpm_node_exporter.service
             │ └─195796 /usr/bin/conmon --api-version 1 -c e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0 -u e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0/userdata -p /run/containers/storage/overlay-containers/e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0/userdata/pidfile -n node_exporter --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0 --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0/userdata/oci-log --conmon-pidfile /run/node_exporter.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 
--stopped-only --exit-command-arg e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0
             ├─edpm_nova_compute.service
             │ └─183115 /usr/bin/conmon --api-version 1 -c 925ff7e3b4039e4c5d83bde5f3c68c16c1a695e578352baba5cb792427c3386e -u 925ff7e3b4039e4c5d83bde5f3c68c16c1a695e578352baba5cb792427c3386e -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/925ff7e3b4039e4c5d83bde5f3c68c16c1a695e578352baba5cb792427c3386e/userdata -p /run/containers/storage/overlay-containers/925ff7e3b4039e4c5d83bde5f3c68c16c1a695e578352baba5cb792427c3386e/userdata/pidfile -n nova_compute --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/925ff7e3b4039e4c5d83bde5f3c68c16c1a695e578352baba5cb792427c3386e --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/925ff7e3b4039e4c5d83bde5f3c68c16c1a695e578352baba5cb792427c3386e/userdata/oci-log --conmon-pidfile /run/nova_compute.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 
--stopped-only --exit-command-arg 925ff7e3b4039e4c5d83bde5f3c68c16c1a695e578352baba5cb792427c3386e
             ├─edpm_openstack_network_exporter.service
             │ └─202048 /usr/bin/conmon --api-version 1 -c 8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a -u 8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a/userdata -p /run/containers/storage/overlay-containers/8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a/userdata/pidfile -n openstack_network_exporter --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a/userdata/oci-log --conmon-pidfile /run/openstack_network_exporter.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup 
--exit-command-arg --stopped-only --exit-command-arg 8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a
             ├─edpm_ovn_controller.service
             │ └─95416 /usr/bin/conmon --api-version 1 -c 60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97 -u 60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97/userdata -p /run/containers/storage/overlay-containers/60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97/userdata/pidfile -n ovn_controller --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97 --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97/userdata/oci-log --conmon-pidfile /run/ovn_controller.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 
--stopped-only --exit-command-arg 60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97
             ├─edpm_ovn_metadata_agent.service
             │ └─104653 /usr/bin/conmon --api-version 1 -c 573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b -u 573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b/userdata -p /run/containers/storage/overlay-containers/573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b/userdata/pidfile -n ovn_metadata_agent --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b/userdata/oci-log --conmon-pidfile /run/ovn_metadata_agent.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup 
--exit-command-arg --stopped-only --exit-command-arg 573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b
             ├─edpm_podman_exporter.service
             │ └─198925 /usr/bin/conmon --api-version 1 -c ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735 -u ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735/userdata -p /run/containers/storage/overlay-containers/ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735/userdata/pidfile -n podman_exporter --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735 --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735/userdata/oci-log --conmon-pidfile /run/podman_exporter.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 
--stopped-only --exit-command-arg ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735
             ├─gssproxy.service
             │ └─873 /usr/sbin/gssproxy -D
             ├─irqbalance.service
             │ └─781 /usr/sbin/irqbalance
             ├─iscsid.service
             │ └─168784 /usr/sbin/iscsid -f
             ├─multipathd.service
             │ └─168942 /sbin/multipathd -d -s
             ├─ovs-vswitchd.service
             │ └─53815 ovs-vswitchd unix:/var/run/openvswitch/db.sock -vconsole:emer -vsyslog:err -vfile:info --mlockall --user openvswitch:hugetlbfs --no-chdir --log-file=/var/log/openvswitch/ovs-vswitchd.log --pidfile=/var/run/openvswitch/ovs-vswitchd.pid --detach
             ├─ovsdb-server.service
             │ └─53733 ovsdb-server /etc/openvswitch/conf.db -vconsole:emer -vsyslog:err -vfile:info --remote=punix:/var/run/openvswitch/db.sock --private-key=db:Open_vSwitch,SSL,private_key --certificate=db:Open_vSwitch,SSL,certificate --bootstrap-ca-cert=db:Open_vSwitch,SSL,ca_cert --user openvswitch:hugetlbfs --no-chdir --log-file=/var/log/openvswitch/ovsdb-server.log --pidfile=/var/run/openvswitch/ovsdb-server.pid --detach
             ├─podman.service
             │ └─198936 /usr/bin/podman --log-level=info system service
             ├─polkit.service
             │ └─43692 /usr/lib/polkit-1/polkitd --no-debug
             ├─rpcbind.service
             │ └─701 /usr/bin/rpcbind -w -f
             ├─rsyslog.service
             │ └─1004 /usr/sbin/rsyslogd -n
             ├─sshd.service
             │ └─129563 "sshd: /usr/sbin/sshd -D [listener] 0 of 10-100 startups"
             ├─system-getty.slice
             │ └─getty@tty1.service
             │   └─1010 /sbin/agetty -o "-p -- \\u" --noclear - linux
             ├─system-serial\x2dgetty.slice
             │ └─serial-getty@ttyS0.service
             │   └─1011 /sbin/agetty -o "-p -- \\u" --keep-baud 115200,57600,38400,9600 - vt220
             ├─systemd-hostnamed.service
             │ └─265104 /usr/lib/systemd/systemd-hostnamed
             ├─systemd-journald.service
             │ └─679 /usr/lib/systemd/systemd-journald
             ├─systemd-logind.service
             │ └─791 /usr/lib/systemd/systemd-logind
             ├─systemd-machined.service
             │ └─154436 /usr/lib/systemd/systemd-machined
             ├─systemd-udevd.service
             │ └─udev
             │   └─731 /usr/lib/systemd/systemd-udevd
             ├─tuned.service
             │ └─43869 /usr/bin/python3 -Es /usr/sbin/tuned -l -P
             ├─virtlogd.service
             │ └─153807 /usr/sbin/virtlogd
             ├─virtnodedevd.service
             │ └─183033 /usr/sbin/virtnodedevd --timeout 120
             └─virtqemud.service
               └─182740 /usr/sbin/virtqemud --timeout 120

Jan 30 20:21:09 compute-0 nova_compute[183115]: 2026-01-30 20:21:09.710 183119 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263[00m
Jan 30 20:21:10 compute-0 nova_compute[183115]: 2026-01-30 20:21:10.954 183119 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263[00m
Jan 30 20:21:14 compute-0 nova_compute[183115]: 2026-01-30 20:21:14.714 183119 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263[00m
Jan 30 20:21:15 compute-0 podman[267255]: 2026-01-30 20:21:15.360069772 +0000 UTC m=+0.049854988 container health_status 60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5 (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20260127, tcib_managed=true, config_data={'command': 'kolla_start', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal', 'EDPM_CONFIG_HASH': 'fb0ba4ae2b01ec1faae83a80acf21c3ad3948d5c7b1e5820f87360e814103f4d-64c7807694e82e018314df3ffb373ed2487497944b057fc7720c3c54ea99ce8f-4513b9ade86adc87d1a6c9416d7c3bf860314bfcf0b3a2bcdbd881f6906fc595-4513b9ade86adc87d1a6c9416d7c3bf860314bfcf0b3a2bcdbd881f6906fc595'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute', 'test': '/openstack/healthcheck compute'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'net': 'host', 'restart': 'always', 'security_opt': 'label:type:ceilometer_polling_t', 'user': 'ceilometer', 'volumes': ['/var/lib/openstack/telemetry:/var/lib/kolla/config_files/src:z', '/var/lib/kolla/config_files/ceilometer_agent_compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', 
'/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, container_name=ceilometer_agent_compute, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, config_id=ceilometer_agent_compute, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=b85d0548925081ae8c6bdd697658cec4)
Jan 30 20:21:15 compute-0 podman[267257]: 2026-01-30 20:21:15.373689858 +0000 UTC m=+0.063275428 container health_status ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735 (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'CONTAINER_HOST': 'unix:///run/podman/podman.sock', 'OS_ENDPOINT_TYPE': 'internal', 'EDPM_CONFIG_HASH': '64c7807694e82e018314df3ffb373ed2487497944b057fc7720c3c54ea99ce8f-4513b9ade86adc87d1a6c9416d7c3bf860314bfcf0b3a2bcdbd881f6906fc595'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/podman_exporter', 'test': '/openstack/healthcheck podman_exporter'}, 'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'net': 'host', 'ports': ['9882:9882'], 'privileged': True, 'recreate': True, 'restart': 'always', 'user': 'root', 'volumes': ['/var/lib/openstack/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=podman_exporter, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Jan 30 20:21:15 compute-0 nova_compute[183115]: 2026-01-30 20:21:15.957 183119 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263[00m
Jan 30 20:21:17 compute-0 sshd-session[267367]: Connection closed by authenticating user root 92.118.39.92 port 57606 [preauth]
Jan 30 20:21:18 compute-0 nova_compute[183115]: 2026-01-30 20:21:18.343 183119 DEBUG oslo_service.periodic_task [None req-1257b01d-d285-41ed-a5cf-5c363b6ab87f - - - - - -] Running periodic task ComputeManager._poll_rebooting_instances run_periodic_tasks /usr/lib/python3.9/site-packages/oslo_service/periodic_task.py:210[00m
Jan 30 20:21:18 compute-0 virtqemud[182740]: Failed to connect socket to '/var/run/libvirt/virtstoraged-sock-ro': No such file or directory
Jan 30 20:21:19 compute-0 nova_compute[183115]: 2026-01-30 20:21:19.759 183119 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 27 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263[00m

● user-1000.slice - User Slice of UID 1000
     Loaded: loaded
    Drop-In: /usr/lib/systemd/system/user-.slice.d
             └─10-defaults.conf
     Active: active since Fri 2026-01-30 16:50:03 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:50:03 UTC; 3h 31min ago
       Docs: man:user@.service(5)
         IO: 280.9M read, 6.1G written
      Tasks: 111 (limit: 41288)
     Memory: 5.2G (peak: 5.3G)
        CPU: 15min 25.752s
     CGroup: /user.slice/user-1000.slice
             ├─session-1.scope
             │ └─4517 /usr/bin/python3
             ├─session-100.scope
             │ ├─245026 "sshd-session: zuul [priv]"
             │ └─245029 "sshd-session: zuul@notty"
             ├─session-102.scope
             │ ├─245122 "sshd-session: zuul [priv]"
             │ └─245125 "sshd-session: zuul@notty"
             ├─session-103.scope
             │ ├─245415 "sshd-session: zuul [priv]"
             │ └─245418 "sshd-session: zuul@notty"
             ├─session-105.scope
             │ ├─245517 "sshd-session: zuul [priv]"
             │ └─245520 "sshd-session: zuul@notty"
             ├─session-108.scope
             │ ├─245810 "sshd-session: zuul [priv]"
             │ └─245829 "sshd-session: zuul@notty"
             ├─session-110.scope
             │ ├─245913 "sshd-session: zuul [priv]"
             │ └─245929 "sshd-session: zuul@notty"
             ├─session-111.scope
             │ ├─245989 "sshd-session: zuul [priv]"
             │ └─245992 "sshd-session: zuul@notty"
             ├─session-113.scope
             │ ├─246092 "sshd-session: zuul [priv]"
             │ └─246095 "sshd-session: zuul@notty"
             ├─session-120.scope
             │ ├─246976 "sshd-session: zuul [priv]"
             │ └─246979 "sshd-session: zuul@notty"
             ├─session-122.scope
             │ ├─247166 "sshd-session: zuul [priv]"
             │ └─247169 "sshd-session: zuul@notty"
             ├─session-124.scope
             │ ├─247625 "sshd-session: zuul [priv]"
             │ └─247628 "sshd-session: zuul@notty"
             ├─session-126.scope
             │ ├─247788 "sshd-session: zuul [priv]"
             │ └─247791 "sshd-session: zuul@notty"
             ├─session-128.scope
             │ ├─248316 "sshd-session: zuul [priv]"
             │ └─248319 "sshd-session: zuul@notty"
             ├─session-130.scope
             │ ├─248418 "sshd-session: zuul [priv]"
             │ └─248421 "sshd-session: zuul@notty"
             ├─session-131.scope
             │ ├─248494 "sshd-session: zuul [priv]"
             │ └─248497 "sshd-session: zuul@notty"
             ├─session-133.scope
             │ ├─248553 "sshd-session: zuul [priv]"
             │ └─248568 "sshd-session: zuul@notty"
             ├─session-134.scope
             │ ├─248856 "sshd-session: zuul [priv]"
             │ └─248883 "sshd-session: zuul@notty"
             ├─session-136.scope
             │ ├─249050 "sshd-session: zuul [priv]"
             │ └─249053 "sshd-session: zuul@notty"
             ├─session-158.scope
             │ ├─264277 "sshd-session: zuul [priv]"
             │ ├─264280 "sshd-session: zuul@notty"
             │ ├─264281 sudo bash -c "rm -rf /var/tmp/sos-osp && mkdir /var/tmp/sos-osp && sos report --batch --all-logs --tmp-dir=/var/tmp/sos-osp  -p container,openstack_edpm,system,storage,virt"
             │ ├─264314 /usr/bin/python3 -s /sbin/sos report --batch --all-logs --tmp-dir=/var/tmp/sos-osp -p container,openstack_edpm,system,storage,virt
             │ ├─267249 timeout 15s turbostat --debug sleep 10
             │ ├─267669 timeout 300s systemctl status --all
             │ ├─267670 systemctl status --all
             │ ├─267750 timeout 300s semanage boolean -l
             │ ├─267752 /usr/bin/python3 -EsI /usr/sbin/semanage boolean -l
             │ ├─267758 timeout --foreground 300s virsh -r nodedev-dumpxml net_tap0251ef32_ee_fe_16_3e_72_3c_9d
             │ └─267759 virsh -r nodedev-dumpxml net_tap0251ef32_ee_fe_16_3e_72_3c_9d
             ├─session-48.scope
             │ ├─240767 "sshd-session: zuul [priv]"
             │ └─240770 "sshd-session: zuul@notty"
             ├─session-50.scope
             │ ├─240930 "sshd-session: zuul [priv]"
             │ └─240933 "sshd-session: zuul@notty"
             ├─session-51.scope
             │ ├─241003 "sshd-session: zuul [priv]"
             │ └─241006 "sshd-session: zuul@notty"
             ├─session-53.scope
             │ ├─241063 "sshd-session: zuul [priv]"
             │ └─241091 "sshd-session: zuul@notty"
             ├─session-54.scope
             │ ├─241146 "sshd-session: zuul [priv]"
             │ └─241149 "sshd-session: zuul@notty"
             ├─session-56.scope
             │ ├─241295 "sshd-session: zuul [priv]"
             │ └─241298 "sshd-session: zuul@notty"
             ├─session-57.scope
             │ ├─241344 "sshd-session: zuul [priv]"
             │ └─241347 "sshd-session: zuul@notty"
             ├─session-59.scope
             │ ├─241444 "sshd-session: zuul [priv]"
             │ └─241447 "sshd-session: zuul@notty"
             ├─session-60.scope
             │ ├─241519 "sshd-session: zuul [priv]"
             │ └─241522 "sshd-session: zuul@notty"
             ├─session-62.scope
             │ ├─241662 "sshd-session: zuul [priv]"
             │ └─241665 "sshd-session: zuul@notty"
             ├─session-68.scope
             │ ├─242466 "sshd-session: zuul [priv]"
             │ └─242469 "sshd-session: zuul@notty"
             ├─session-70.scope
             │ ├─242564 "sshd-session: zuul [priv]"
             │ └─242586 "sshd-session: zuul@notty"
             ├─session-71.scope
             │ ├─242638 "sshd-session: zuul [priv]"
             │ └─242641 "sshd-session: zuul@notty"
             ├─session-73.scope
             │ ├─242742 "sshd-session: zuul [priv]"
             │ └─242745 "sshd-session: zuul@notty"
             ├─session-74.scope
             │ ├─242772 "sshd-session: zuul [priv]"
             │ └─242775 "sshd-session: zuul@notty"
             ├─session-76.scope
             │ ├─242832 "sshd-session: zuul [priv]"
             │ └─242835 "sshd-session: zuul@notty"
             ├─session-79.scope
             │ ├─243425 "sshd-session: zuul [priv]"
             │ └─243428 "sshd-session: zuul@notty"
             ├─session-81.scope
             │ ├─243524 "sshd-session: zuul [priv]"
             │ └─243527 "sshd-session: zuul@notty"
             ├─session-82.scope
             │ ├─243554 "sshd-session: zuul [priv]"
             │ └─243557 "sshd-session: zuul@notty"
             ├─session-84.scope
             │ ├─243613 "sshd-session: zuul [priv]"
             │ └─243616 "sshd-session: zuul@notty"
             ├─session-85.scope
             │ ├─243643 "sshd-session: zuul [priv]"
             │ └─243668 "sshd-session: zuul@notty"
             ├─session-87.scope
             │ ├─243790 "sshd-session: zuul [priv]"
             │ └─243793 "sshd-session: zuul@notty"
             ├─session-94.scope
             │ ├─244430 "sshd-session: zuul [priv]"
             │ └─244433 "sshd-session: zuul@notty"
             ├─session-96.scope
             │ ├─244575 "sshd-session: zuul [priv]"
             │ └─244578 "sshd-session: zuul@notty"
             ├─session-97.scope
             │ ├─244785 "sshd-session: zuul [priv]"
             │ └─244807 "sshd-session: zuul@notty"
             ├─session-99.scope
             │ ├─244996 "sshd-session: zuul [priv]"
             │ └─244999 "sshd-session: zuul@notty"
             └─user@1000.service
               ├─app.slice
               │ └─dbus-broker.service
               │   ├─15953 /usr/bin/dbus-broker-launch --scope user
               │   └─15968 dbus-broker --log 4 --controller 9 --machine-id bf0bc0bb03de29b24cba1cc9599cf5d0 --max-bytes 100000000000000 --max-fds 25000000000000 --max-matches 5000000000
               ├─init.scope
               │ ├─4307 /usr/lib/systemd/systemd --user
               │ └─4309 "(sd-pam)"
               └─user.slice
                 └─podman-pause-bff822aa.scope
                   └─15896 catatonit -P

Jan 30 20:15:53 compute-0 sshd-session[262839]: Connection closed by 38.102.83.246 port 46210
Jan 30 20:15:53 compute-0 sshd-session[262866]: Connection closed by 38.102.83.246 port 48086
Jan 30 20:16:30 compute-0 sshd-session[263055]: Connection closed by 38.102.83.246 port 36008
Jan 30 20:18:44 compute-0 sshd-session[263670]: Connection closed by 38.102.83.246 port 58814
Jan 30 20:18:44 compute-0 sshd-session[263697]: Connection closed by 38.102.83.246 port 58828
Jan 30 20:20:56 compute-0 sudo[264281]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/bash -c 'rm -rf /var/tmp/sos-osp && mkdir /var/tmp/sos-osp && sos report --batch --all-logs --tmp-dir=/var/tmp/sos-osp  -p container,openstack_edpm,system,storage,virt'
Jan 30 20:20:56 compute-0 sudo[264281]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 20:21:00 compute-0 ovs-vsctl[264494]: ovs|00001|db_ctl_base|ERR|no key "dpdk-init" in Open_vSwitch record "." column other_config
Jan 30 20:21:04 compute-0 sshd-session[265022]: Connection closed by 38.102.83.246 port 39416
Jan 30 20:21:04 compute-0 sshd-session[265055]: Connection closed by 38.102.83.246 port 39428

● user.slice - User and Session Slice
     Loaded: loaded (/usr/lib/systemd/system/user.slice; static)
     Active: active since Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
       Docs: man:systemd.special(7)
         IO: 280.9M read, 6.1G written
      Tasks: 113
     Memory: 5.2G (peak: 5.3G)
        CPU: 15min 26.405s
     CGroup: /user.slice
             └─user-1000.slice
               ├─session-1.scope
               │ └─4517 /usr/bin/python3
               ├─session-100.scope
               │ ├─245026 "sshd-session: zuul [priv]"
               │ └─245029 "sshd-session: zuul@notty"
               ├─session-102.scope
               │ ├─245122 "sshd-session: zuul [priv]"
               │ └─245125 "sshd-session: zuul@notty"
               ├─session-103.scope
               │ ├─245415 "sshd-session: zuul [priv]"
               │ └─245418 "sshd-session: zuul@notty"
               ├─session-105.scope
               │ ├─245517 "sshd-session: zuul [priv]"
               │ └─245520 "sshd-session: zuul@notty"
               ├─session-108.scope
               │ ├─245810 "sshd-session: zuul [priv]"
               │ └─245829 "sshd-session: zuul@notty"
               ├─session-110.scope
               │ ├─245913 "sshd-session: zuul [priv]"
               │ └─245929 "sshd-session: zuul@notty"
               ├─session-111.scope
               │ ├─245989 "sshd-session: zuul [priv]"
               │ └─245992 "sshd-session: zuul@notty"
               ├─session-113.scope
               │ ├─246092 "sshd-session: zuul [priv]"
               │ └─246095 "sshd-session: zuul@notty"
               ├─session-120.scope
               │ ├─246976 "sshd-session: zuul [priv]"
               │ └─246979 "sshd-session: zuul@notty"
               ├─session-122.scope
               │ ├─247166 "sshd-session: zuul [priv]"
               │ └─247169 "sshd-session: zuul@notty"
               ├─session-124.scope
               │ ├─247625 "sshd-session: zuul [priv]"
               │ └─247628 "sshd-session: zuul@notty"
               ├─session-126.scope
               │ ├─247788 "sshd-session: zuul [priv]"
               │ └─247791 "sshd-session: zuul@notty"
               ├─session-128.scope
               │ ├─248316 "sshd-session: zuul [priv]"
               │ └─248319 "sshd-session: zuul@notty"
               ├─session-130.scope
               │ ├─248418 "sshd-session: zuul [priv]"
               │ └─248421 "sshd-session: zuul@notty"
               ├─session-131.scope
               │ ├─248494 "sshd-session: zuul [priv]"
               │ └─248497 "sshd-session: zuul@notty"
               ├─session-133.scope
               │ ├─248553 "sshd-session: zuul [priv]"
               │ └─248568 "sshd-session: zuul@notty"
               ├─session-134.scope
               │ ├─248856 "sshd-session: zuul [priv]"
               │ └─248883 "sshd-session: zuul@notty"
               ├─session-136.scope
               │ ├─249050 "sshd-session: zuul [priv]"
               │ └─249053 "sshd-session: zuul@notty"
               ├─session-158.scope
               │ ├─264277 "sshd-session: zuul [priv]"
               │ ├─264280 "sshd-session: zuul@notty"
               │ ├─264281 sudo bash -c "rm -rf /var/tmp/sos-osp && mkdir /var/tmp/sos-osp && sos report --batch --all-logs --tmp-dir=/var/tmp/sos-osp  -p container,openstack_edpm,system,storage,virt"
               │ ├─264314 /usr/bin/python3 -s /sbin/sos report --batch --all-logs --tmp-dir=/var/tmp/sos-osp -p container,openstack_edpm,system,storage,virt
               │ ├─267249 timeout 15s turbostat --debug sleep 10
               │ ├─267669 timeout 300s systemctl status --all
               │ ├─267670 systemctl status --all
               │ ├─267750 timeout 300s semanage boolean -l
               │ ├─267752 /usr/bin/python3 -EsI /usr/sbin/semanage boolean -l
               │ ├─267758 timeout --foreground 300s virsh -r nodedev-dumpxml net_tap0251ef32_ee_fe_16_3e_72_3c_9d
               │ └─267759 virsh -r nodedev-dumpxml net_tap0251ef32_ee_fe_16_3e_72_3c_9d
               ├─session-48.scope
               │ ├─240767 "sshd-session: zuul [priv]"
               │ └─240770 "sshd-session: zuul@notty"
               ├─session-50.scope
               │ ├─240930 "sshd-session: zuul [priv]"
               │ └─240933 "sshd-session: zuul@notty"
               ├─session-51.scope
               │ ├─241003 "sshd-session: zuul [priv]"
               │ └─241006 "sshd-session: zuul@notty"
               ├─session-53.scope
               │ ├─241063 "sshd-session: zuul [priv]"
               │ └─241091 "sshd-session: zuul@notty"
               ├─session-54.scope
               │ ├─241146 "sshd-session: zuul [priv]"
               │ └─241149 "sshd-session: zuul@notty"
               ├─session-56.scope
               │ ├─241295 "sshd-session: zuul [priv]"
               │ └─241298 "sshd-session: zuul@notty"
               ├─session-57.scope
               │ ├─241344 "sshd-session: zuul [priv]"
               │ └─241347 "sshd-session: zuul@notty"
               ├─session-59.scope
               │ ├─241444 "sshd-session: zuul [priv]"
               │ └─241447 "sshd-session: zuul@notty"
               ├─session-60.scope
               │ ├─241519 "sshd-session: zuul [priv]"
               │ └─241522 "sshd-session: zuul@notty"
               ├─session-62.scope
               │ ├─241662 "sshd-session: zuul [priv]"
               │ └─241665 "sshd-session: zuul@notty"
               ├─session-68.scope
               │ ├─242466 "sshd-session: zuul [priv]"
               │ └─242469 "sshd-session: zuul@notty"
               ├─session-70.scope
               │ ├─242564 "sshd-session: zuul [priv]"
               │ └─242586 "sshd-session: zuul@notty"
               ├─session-71.scope
               │ ├─242638 "sshd-session: zuul [priv]"
               │ └─242641 "sshd-session: zuul@notty"
               ├─session-73.scope
               │ ├─242742 "sshd-session: zuul [priv]"
               │ └─242745 "sshd-session: zuul@notty"
               ├─session-74.scope
               │ ├─242772 "sshd-session: zuul [priv]"
               │ └─242775 "sshd-session: zuul@notty"
               ├─session-76.scope
               │ ├─242832 "sshd-session: zuul [priv]"
               │ └─242835 "sshd-session: zuul@notty"
               ├─session-79.scope
               │ ├─243425 "sshd-session: zuul [priv]"
               │ └─243428 "sshd-session: zuul@notty"
               ├─session-81.scope
               │ ├─243524 "sshd-session: zuul [priv]"
               │ └─243527 "sshd-session: zuul@notty"
               ├─session-82.scope
               │ ├─243554 "sshd-session: zuul [priv]"
               │ └─243557 "sshd-session: zuul@notty"
               ├─session-84.scope
               │ ├─243613 "sshd-session: zuul [priv]"
               │ └─243616 "sshd-session: zuul@notty"
               ├─session-85.scope
               │ ├─243643 "sshd-session: zuul [priv]"
               │ └─243668 "sshd-session: zuul@notty"
               ├─session-87.scope
               │ ├─243790 "sshd-session: zuul [priv]"
               │ └─243793 "sshd-session: zuul@notty"
               ├─session-94.scope
               │ ├─244430 "sshd-session: zuul [priv]"
               │ └─244433 "sshd-session: zuul@notty"
               ├─session-96.scope
               │ ├─244575 "sshd-session: zuul [priv]"
               │ └─244578 "sshd-session: zuul@notty"
               ├─session-97.scope
               │ ├─244785 "sshd-session: zuul [priv]"
               │ └─244807 "sshd-session: zuul@notty"
               ├─session-99.scope
               │ ├─244996 "sshd-session: zuul [priv]"
               │ └─244999 "sshd-session: zuul@notty"
               └─user@1000.service
                 ├─app.slice
                 │ └─dbus-broker.service
                 │   ├─15953 /usr/bin/dbus-broker-launch --scope user
                 │   └─15968 dbus-broker --log 4 --controller 9 --machine-id bf0bc0bb03de29b24cba1cc9599cf5d0 --max-bytes 100000000000000 --max-fds 25000000000000 --max-matches 5000000000
                 ├─init.scope
                 │ ├─4307 /usr/lib/systemd/systemd --user
                 │ └─4309 "(sd-pam)"
                 └─user.slice
                   └─podman-pause-bff822aa.scope
                     └─15896 catatonit -P

● dbus.socket - D-Bus System Message Bus Socket
     Loaded: loaded (/usr/lib/systemd/system/dbus.socket; enabled; preset: enabled)
     Active: active (running) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
   Triggers: ● dbus-broker.service
     Listen: /run/dbus/system_bus_socket (Stream)
     CGroup: /system.slice/dbus.socket

Jan 30 16:49:47 localhost systemd[1]: Listening on D-Bus System Message Bus Socket.

● dm-event.socket - Device-mapper event daemon FIFOs
     Loaded: loaded (/usr/lib/systemd/system/dm-event.socket; enabled; preset: enabled)
     Active: active (listening) since Fri 2026-01-30 17:24:17 UTC; 2h 57min ago
      Until: Fri 2026-01-30 17:24:17 UTC; 2h 57min ago
   Triggers: ● dm-event.service
       Docs: man:dmeventd(8)
     Listen: /run/dmeventd-server (FIFO)
             /run/dmeventd-client (FIFO)
     CGroup: /system.slice/dm-event.socket

Jan 30 17:24:17 compute-0 systemd[1]: Listening on Device-mapper event daemon FIFOs.

● iscsid.socket - Open-iSCSI iscsid Socket
     Loaded: loaded (/usr/lib/systemd/system/iscsid.socket; enabled; preset: enabled)
     Active: active (running) since Fri 2026-01-30 17:40:31 UTC; 2h 40min ago
      Until: Fri 2026-01-30 17:40:31 UTC; 2h 40min ago
   Triggers: ● iscsid.service
       Docs: man:iscsid(8)
             man:iscsiadm(8)
     Listen: @ISCSIADM_ABSTRACT_NAMESPACE (Stream)
     CGroup: /system.slice/iscsid.socket

Jan 30 17:40:31 compute-0 systemd[1]: Listening on Open-iSCSI iscsid Socket.

○ iscsiuio.socket - Open-iSCSI iscsiuio Socket
     Loaded: loaded (/usr/lib/systemd/system/iscsiuio.socket; enabled; preset: enabled)
     Active: inactive (dead)
   Triggers: ● iscsiuio.service
       Docs: man:iscsiuio(8)
     Listen: @ISCSID_UIP_ABSTRACT_NAMESPACE (Stream)

○ libvirtd-admin.socket - libvirt legacy monolithic daemon admin socket
     Loaded: loaded (/usr/lib/systemd/system/libvirtd-admin.socket; disabled; preset: disabled)
     Active: inactive (dead)
   Triggers: ● libvirtd.service
     Listen: /run/libvirt/libvirt-admin-sock (Stream)

○ libvirtd-ro.socket - libvirt legacy monolithic daemon read-only socket
     Loaded: loaded (/usr/lib/systemd/system/libvirtd-ro.socket; disabled; preset: disabled)
     Active: inactive (dead)
   Triggers: ● libvirtd.service
     Listen: /run/libvirt/libvirt-sock-ro (Stream)

○ libvirtd-tls.socket
     Loaded: masked (Reason: Unit libvirtd-tls.socket is masked.)
     Active: inactive (dead)

○ libvirtd.socket - libvirt legacy monolithic daemon socket
     Loaded: loaded (/usr/lib/systemd/system/libvirtd.socket; disabled; preset: disabled)
     Active: inactive (dead)
   Triggers: ● libvirtd.service
     Listen: /run/libvirt/libvirt-sock (Stream)

● lvm2-lvmpolld.socket - LVM2 poll daemon socket
     Loaded: loaded (/usr/lib/systemd/system/lvm2-lvmpolld.socket; enabled; preset: enabled)
     Active: active (listening) since Fri 2026-01-30 17:24:18 UTC; 2h 57min ago
      Until: Fri 2026-01-30 17:24:18 UTC; 2h 57min ago
   Triggers: ● lvm2-lvmpolld.service
       Docs: man:lvmpolld(8)
     Listen: /run/lvm/lvmpolld.socket (Stream)
     CGroup: /system.slice/lvm2-lvmpolld.socket

Jan 30 17:24:18 compute-0 systemd[1]: Listening on LVM2 poll daemon socket.

● multipathd.socket - multipathd control socket
     Loaded: loaded (/usr/lib/systemd/system/multipathd.socket; enabled; preset: disabled)
     Active: active (running) since Fri 2026-01-30 17:40:55 UTC; 2h 40min ago
      Until: Fri 2026-01-30 17:40:55 UTC; 2h 40min ago
   Triggers: ● multipathd.service
     Listen: @/org/kernel/linux/storage/multipathd (Stream)
             /run/multipathd.socket (Stream)
     CGroup: /system.slice/multipathd.socket

Jan 30 17:40:55 compute-0 systemd[1]: Listening on multipathd control socket.

● podman.socket - Podman API Socket
     Loaded: loaded (/usr/lib/systemd/system/podman.socket; enabled; preset: disabled)
     Active: active (running) since Fri 2026-01-30 17:43:06 UTC; 2h 38min ago
      Until: Fri 2026-01-30 17:43:06 UTC; 2h 38min ago
   Triggers: ● podman.service
       Docs: man:podman-system-service(1)
     Listen: /run/podman/podman.sock (Stream)
     CGroup: /system.slice/podman.socket

Jan 30 17:43:06 compute-0 systemd[1]: Listening on Podman API Socket.

● rpcbind.socket - RPCbind Server Activation Socket
     Loaded: loaded (/usr/lib/systemd/system/rpcbind.socket; enabled; preset: enabled)
     Active: active (running) since Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
   Triggers: ● rpcbind.service
     Listen: /run/rpcbind.sock (Stream)
             0.0.0.0:111 (Stream)
             0.0.0.0:111 (Datagram)
             [::]:111 (Stream)
             [::]:111 (Datagram)
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 16.0K (peak: 288.0K)
        CPU: 4ms
     CGroup: /system.slice/rpcbind.socket

● sssd-kcm.socket - SSSD Kerberos Cache Manager responder socket
     Loaded: loaded (/usr/lib/systemd/system/sssd-kcm.socket; enabled; preset: enabled)
     Active: active (listening) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
   Triggers: ● sssd-kcm.service
       Docs: man:sssd-kcm(8)
     Listen: /run/.heim_org.h5l.kcm-socket (Stream)
     CGroup: /system.slice/sssd-kcm.socket

Jan 30 16:49:47 localhost systemd[1]: Listening on SSSD Kerberos Cache Manager responder socket.

○ syslog.socket - Syslog Socket
     Loaded: loaded (/usr/lib/systemd/system/syslog.socket; static)
     Active: inactive (dead)
   Triggers: ● syslog.service
       Docs: man:systemd.special(7)
             https://www.freedesktop.org/wiki/Software/systemd/syslog
     Listen: /run/systemd/journal/syslog (Datagram)

● systemd-coredump.socket - Process Core Dump Socket
     Loaded: loaded (/usr/lib/systemd/system/systemd-coredump.socket; static)
     Active: active (listening) since Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
       Docs: man:systemd-coredump(8)
     Listen: /run/systemd/coredump (SequentialPacket)
   Accepted: 0; Connected: 0;
     CGroup: /system.slice/systemd-coredump.socket

● systemd-initctl.socket - initctl Compatibility Named Pipe
     Loaded: loaded (/usr/lib/systemd/system/systemd-initctl.socket; static)
     Active: active (listening) since Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
   Triggers: ● systemd-initctl.service
       Docs: man:systemd-initctl.socket(8)
     Listen: /run/initctl (FIFO)
     CGroup: /system.slice/systemd-initctl.socket

● systemd-journald-dev-log.socket - Journal Socket (/dev/log)
     Loaded: loaded (/usr/lib/systemd/system/systemd-journald-dev-log.socket; static)
     Active: active (running) since Fri 2026-01-30 16:49:43 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:43 UTC; 3h 31min ago
   Triggers: ● systemd-journald.service
       Docs: man:systemd-journald.service(8)
             man:journald.conf(5)
     Listen: /run/systemd/journal/dev-log (Datagram)
     CGroup: /system.slice/systemd-journald-dev-log.socket

Notice: journal has been rotated since unit was started, output may be incomplete.

● systemd-journald.socket - Journal Socket
     Loaded: loaded (/usr/lib/systemd/system/systemd-journald.socket; static)
     Active: active (running) since Fri 2026-01-30 16:49:43 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:43 UTC; 3h 31min ago
   Triggers: ● systemd-journald.service
       Docs: man:systemd-journald.service(8)
             man:journald.conf(5)
     Listen: /run/systemd/journal/socket (Datagram)
             /run/systemd/journal/stdout (Stream)
     CGroup: /system.slice/systemd-journald.socket

Notice: journal has been rotated since unit was started, output may be incomplete.

● systemd-rfkill.socket - Load/Save RF Kill Switch Status /dev/rfkill Watch
     Loaded: loaded (/usr/lib/systemd/system/systemd-rfkill.socket; static)
     Active: active (listening) since Fri 2026-01-30 16:49:49 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:49 UTC; 3h 31min ago
   Triggers: ● systemd-rfkill.service
       Docs: man:systemd-rfkill.socket(8)
     Listen: /dev/rfkill (Special)
     CGroup: /system.slice/systemd-rfkill.socket

Jan 30 16:49:49 np0005602930.novalocal systemd[1]: Listening on Load/Save RF Kill Switch Status /dev/rfkill Watch.

● systemd-udevd-control.socket - udev Control Socket
     Loaded: loaded (/usr/lib/systemd/system/systemd-udevd-control.socket; static)
     Active: active (running) since Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
   Triggers: ● systemd-udevd.service
       Docs: man:systemd-udevd-control.socket(8)
             man:udev(7)
     Listen: /run/udev/control (SequentialPacket)
     CGroup: /system.slice/systemd-udevd-control.socket

● systemd-udevd-kernel.socket - udev Kernel Socket
     Loaded: loaded (/usr/lib/systemd/system/systemd-udevd-kernel.socket; static)
     Active: active (running) since Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
   Triggers: ● systemd-udevd.service
       Docs: man:systemd-udevd-kernel.socket(8)
             man:udev(7)
     Listen: kobject-uevent 1 (Netlink)
     CGroup: /system.slice/systemd-udevd-kernel.socket

○ virtinterfaced-admin.socket - libvirt interface daemon admin socket
     Loaded: loaded (/usr/lib/systemd/system/virtinterfaced-admin.socket; enabled; preset: enabled)
     Active: inactive (dead)
   Triggers: ● virtinterfaced.service
     Listen: /run/libvirt/virtinterfaced-admin-sock (Stream)

○ virtinterfaced-ro.socket - libvirt interface daemon read-only socket
     Loaded: loaded (/usr/lib/systemd/system/virtinterfaced-ro.socket; enabled; preset: enabled)
     Active: inactive (dead)
   Triggers: ● virtinterfaced.service
     Listen: /run/libvirt/virtinterfaced-sock-ro (Stream)

○ virtinterfaced.socket - libvirt interface daemon socket
     Loaded: loaded (/usr/lib/systemd/system/virtinterfaced.socket; enabled; preset: enabled)
     Active: inactive (dead)
   Triggers: ● virtinterfaced.service
     Listen: /run/libvirt/virtinterfaced-sock (Stream)

○ virtlockd-admin.socket - libvirt locking daemon admin socket
     Loaded: loaded (/usr/lib/systemd/system/virtlockd-admin.socket; enabled; preset: enabled)
     Active: inactive (dead)
   Triggers: ● virtlockd.service
     Listen: /run/libvirt/virtlockd-admin-sock (Stream)

● virtlockd.socket - libvirt locking daemon socket
     Loaded: loaded (/usr/lib/systemd/system/virtlockd.socket; enabled; preset: enabled)
     Active: active (listening) since Fri 2026-01-30 17:39:33 UTC; 2h 41min ago
      Until: Fri 2026-01-30 17:39:33 UTC; 2h 41min ago
   Triggers: ● virtlockd.service
     Listen: /run/libvirt/virtlockd-sock (Stream)
     CGroup: /system.slice/virtlockd.socket

Jan 30 17:39:33 compute-0 systemd[1]: Listening on libvirt locking daemon socket.

● virtlogd-admin.socket - libvirt logging daemon admin socket
     Loaded: loaded (/usr/lib/systemd/system/virtlogd-admin.socket; enabled; preset: enabled)
    Drop-In: /etc/systemd/system/virtlogd-admin.socket.d
             └─override.conf
     Active: active (running) since Fri 2026-01-30 17:39:30 UTC; 2h 41min ago
      Until: Fri 2026-01-30 17:39:30 UTC; 2h 41min ago
   Triggers: ● virtlogd.service
     Listen: /run/libvirt/virtlogd-admin-sock (Stream)
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 0B (peak: 512.0K)
        CPU: 2ms
     CGroup: /system.slice/virtlogd-admin.socket

Jan 30 17:39:30 compute-0 systemd[1]: Starting libvirt logging daemon admin socket...
Jan 30 17:39:30 compute-0 systemd[1]: Listening on libvirt logging daemon admin socket.

● virtlogd.socket - libvirt logging daemon socket
     Loaded: loaded (/usr/lib/systemd/system/virtlogd.socket; enabled; preset: enabled)
    Drop-In: /etc/systemd/system/virtlogd.socket.d
             └─override.conf
     Active: active (running) since Fri 2026-01-30 17:39:30 UTC; 2h 41min ago
      Until: Fri 2026-01-30 17:39:30 UTC; 2h 41min ago
   Triggers: ● virtlogd.service
     Listen: /run/libvirt/virtlogd-sock (Stream)
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 0B (peak: 512.0K)
        CPU: 3ms
     CGroup: /system.slice/virtlogd.socket

Jan 30 17:39:30 compute-0 systemd[1]: Starting libvirt logging daemon socket...
Jan 30 17:39:30 compute-0 systemd[1]: Listening on libvirt logging daemon socket.

○ virtnetworkd-admin.socket - libvirt network daemon admin socket
     Loaded: loaded (/usr/lib/systemd/system/virtnetworkd-admin.socket; enabled; preset: enabled)
     Active: inactive (dead)
   Triggers: ● virtnetworkd.service
     Listen: /run/libvirt/virtnetworkd-admin-sock (Stream)

○ virtnetworkd-ro.socket - libvirt network daemon read-only socket
     Loaded: loaded (/usr/lib/systemd/system/virtnetworkd-ro.socket; enabled; preset: enabled)
     Active: inactive (dead)
   Triggers: ● virtnetworkd.service
     Listen: /run/libvirt/virtnetworkd-sock-ro (Stream)

○ virtnetworkd.socket - libvirt network daemon socket
     Loaded: loaded (/usr/lib/systemd/system/virtnetworkd.socket; enabled; preset: enabled)
     Active: inactive (dead)
   Triggers: ● virtnetworkd.service
     Listen: /run/libvirt/virtnetworkd-sock (Stream)

● virtnodedevd-admin.socket - libvirt nodedev daemon admin socket
     Loaded: loaded (/usr/lib/systemd/system/virtnodedevd-admin.socket; enabled; preset: enabled)
    Drop-In: /etc/systemd/system/virtnodedevd-admin.socket.d
             └─override.conf
     Active: active (running) since Fri 2026-01-30 17:39:31 UTC; 2h 41min ago
      Until: Fri 2026-01-30 17:39:31 UTC; 2h 41min ago
   Triggers: ● virtnodedevd.service
     Listen: /run/libvirt/virtnodedevd-admin-sock (Stream)
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 0B (peak: 512.0K)
        CPU: 2ms
     CGroup: /system.slice/virtnodedevd-admin.socket

Jan 30 17:39:31 compute-0 systemd[1]: Starting libvirt nodedev daemon admin socket...
Jan 30 17:39:31 compute-0 systemd[1]: Listening on libvirt nodedev daemon admin socket.

● virtnodedevd-ro.socket - libvirt nodedev daemon read-only socket
     Loaded: loaded (/usr/lib/systemd/system/virtnodedevd-ro.socket; enabled; preset: enabled)
    Drop-In: /etc/systemd/system/virtnodedevd-ro.socket.d
             └─override.conf
     Active: active (running) since Fri 2026-01-30 17:39:31 UTC; 2h 41min ago
      Until: Fri 2026-01-30 17:39:31 UTC; 2h 41min ago
   Triggers: ● virtnodedevd.service
     Listen: /run/libvirt/virtnodedevd-sock-ro (Stream)
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 0B (peak: 512.0K)
        CPU: 4ms
     CGroup: /system.slice/virtnodedevd-ro.socket

Jan 30 17:39:31 compute-0 systemd[1]: Starting libvirt nodedev daemon read-only socket...
Jan 30 17:39:31 compute-0 systemd[1]: Listening on libvirt nodedev daemon read-only socket.

● virtnodedevd.socket - libvirt nodedev daemon socket
     Loaded: loaded (/usr/lib/systemd/system/virtnodedevd.socket; enabled; preset: enabled)
    Drop-In: /etc/systemd/system/virtnodedevd.socket.d
             └─override.conf
     Active: active (running) since Fri 2026-01-30 17:39:31 UTC; 2h 41min ago
      Until: Fri 2026-01-30 17:39:31 UTC; 2h 41min ago
   Triggers: ● virtnodedevd.service
     Listen: /run/libvirt/virtnodedevd-sock (Stream)
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 0B (peak: 512.0K)
        CPU: 2ms
     CGroup: /system.slice/virtnodedevd.socket

Jan 30 17:39:31 compute-0 systemd[1]: Starting libvirt nodedev daemon socket...
Jan 30 17:39:31 compute-0 systemd[1]: Listening on libvirt nodedev daemon socket.

○ virtnwfilterd-admin.socket - libvirt nwfilter daemon admin socket
     Loaded: loaded (/usr/lib/systemd/system/virtnwfilterd-admin.socket; enabled; preset: enabled)
     Active: inactive (dead)
   Triggers: ● virtnwfilterd.service
     Listen: /run/libvirt/virtnwfilterd-admin-sock (Stream)

○ virtnwfilterd-ro.socket - libvirt nwfilter daemon read-only socket
     Loaded: loaded (/usr/lib/systemd/system/virtnwfilterd-ro.socket; enabled; preset: enabled)
     Active: inactive (dead)
   Triggers: ● virtnwfilterd.service
     Listen: /run/libvirt/virtnwfilterd-sock-ro (Stream)

○ virtnwfilterd.socket - libvirt nwfilter daemon socket
     Loaded: loaded (/usr/lib/systemd/system/virtnwfilterd.socket; enabled; preset: enabled)
     Active: inactive (dead)
   Triggers: ● virtnwfilterd.service
     Listen: /run/libvirt/virtnwfilterd-sock (Stream)

● virtproxyd-admin.socket - libvirt proxy daemon admin socket
     Loaded: loaded (/usr/lib/systemd/system/virtproxyd-admin.socket; enabled; preset: enabled)
    Drop-In: /etc/systemd/system/virtproxyd-admin.socket.d
             └─override.conf
     Active: active (listening) since Fri 2026-01-30 17:39:32 UTC; 2h 41min ago
      Until: Fri 2026-01-30 17:39:32 UTC; 2h 41min ago
   Triggers: ● virtproxyd.service
     Listen: /run/libvirt/libvirt-admin-sock (Stream)
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 0B (peak: 420.0K)
        CPU: 2ms
     CGroup: /system.slice/virtproxyd-admin.socket

Jan 30 17:39:32 compute-0 systemd[1]: Starting libvirt proxy daemon admin socket...
Jan 30 17:39:32 compute-0 systemd[1]: Listening on libvirt proxy daemon admin socket.

● virtproxyd-ro.socket - libvirt proxy daemon read-only socket
     Loaded: loaded (/usr/lib/systemd/system/virtproxyd-ro.socket; enabled; preset: enabled)
    Drop-In: /etc/systemd/system/virtproxyd-ro.socket.d
             └─override.conf
     Active: active (listening) since Fri 2026-01-30 17:39:32 UTC; 2h 41min ago
      Until: Fri 2026-01-30 17:39:32 UTC; 2h 41min ago
   Triggers: ● virtproxyd.service
     Listen: /run/libvirt/libvirt-sock-ro (Stream)
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 0B (peak: 512.0K)
        CPU: 2ms
     CGroup: /system.slice/virtproxyd-ro.socket

Jan 30 17:39:32 compute-0 systemd[1]: Starting libvirt proxy daemon read-only socket...
Jan 30 17:39:32 compute-0 systemd[1]: Listening on libvirt proxy daemon read-only socket.

● virtproxyd-tls.socket - libvirt proxy daemon TLS IP socket
     Loaded: loaded (/usr/lib/systemd/system/virtproxyd-tls.socket; enabled; preset: disabled)
     Active: active (listening) since Fri 2026-01-30 17:38:29 UTC; 2h 42min ago
      Until: Fri 2026-01-30 17:38:29 UTC; 2h 42min ago
   Triggers: ● virtproxyd.service
     Listen: [::]:16514 (Stream)
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 8.0K (peak: 256.0K)
        CPU: 1ms
     CGroup: /system.slice/virtproxyd-tls.socket

Jan 30 17:38:29 compute-0 systemd[1]: Listening on libvirt proxy daemon TLS IP socket.

● virtproxyd.socket - libvirt proxy daemon socket
     Loaded: loaded (/usr/lib/systemd/system/virtproxyd.socket; enabled; preset: enabled)
    Drop-In: /etc/systemd/system/virtproxyd.socket.d
             └─override.conf
     Active: active (listening) since Fri 2026-01-30 17:38:29 UTC; 2h 42min ago
      Until: Fri 2026-01-30 17:38:29 UTC; 2h 42min ago
   Triggers: ● virtproxyd.service
     Listen: /run/libvirt/libvirt-sock (Stream)
     CGroup: /system.slice/virtproxyd.socket

Jan 30 17:38:29 compute-0 systemd[1]: Listening on libvirt proxy daemon socket.

● virtqemud-admin.socket - libvirt QEMU daemon admin socket
     Loaded: loaded (/usr/lib/systemd/system/virtqemud-admin.socket; enabled; preset: enabled)
    Drop-In: /etc/systemd/system/virtqemud-admin.socket.d
             └─override.conf
     Active: active (running) since Fri 2026-01-30 17:39:33 UTC; 2h 41min ago
      Until: Fri 2026-01-30 17:39:33 UTC; 2h 41min ago
   Triggers: ● virtqemud.service
     Listen: /run/libvirt/virtqemud-admin-sock (Stream)
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 0B (peak: 500.0K)
        CPU: 2ms
     CGroup: /system.slice/virtqemud-admin.socket

Jan 30 17:39:33 compute-0 systemd[1]: Starting libvirt QEMU daemon admin socket...
Jan 30 17:39:33 compute-0 systemd[1]: Listening on libvirt QEMU daemon admin socket.

● virtqemud-ro.socket - libvirt QEMU daemon read-only socket
     Loaded: loaded (/usr/lib/systemd/system/virtqemud-ro.socket; enabled; preset: enabled)
    Drop-In: /etc/systemd/system/virtqemud-ro.socket.d
             └─override.conf
     Active: active (running) since Fri 2026-01-30 17:39:33 UTC; 2h 41min ago
      Until: Fri 2026-01-30 17:39:33 UTC; 2h 41min ago
   Triggers: ● virtqemud.service
     Listen: /run/libvirt/virtqemud-sock-ro (Stream)
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 0B (peak: 464.0K)
        CPU: 3ms
     CGroup: /system.slice/virtqemud-ro.socket

Jan 30 17:39:33 compute-0 systemd[1]: Starting libvirt QEMU daemon read-only socket...
Jan 30 17:39:33 compute-0 systemd[1]: Listening on libvirt QEMU daemon read-only socket.

● virtqemud.socket - libvirt QEMU daemon socket
     Loaded: loaded (/usr/lib/systemd/system/virtqemud.socket; enabled; preset: enabled)
    Drop-In: /etc/systemd/system/virtqemud.socket.d
             └─override.conf
     Active: active (running) since Fri 2026-01-30 17:39:33 UTC; 2h 41min ago
      Until: Fri 2026-01-30 17:39:33 UTC; 2h 41min ago
   Triggers: ● virtqemud.service
     Listen: /run/libvirt/virtqemud-sock (Stream)
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 0B (peak: 512.0K)
        CPU: 3ms
     CGroup: /system.slice/virtqemud.socket

Jan 30 17:39:33 compute-0 systemd[1]: Starting libvirt QEMU daemon socket...
Jan 30 17:39:33 compute-0 systemd[1]: Listening on libvirt QEMU daemon socket.

● virtsecretd-admin.socket - libvirt secret daemon admin socket
     Loaded: loaded (/usr/lib/systemd/system/virtsecretd-admin.socket; enabled; preset: enabled)
    Drop-In: /etc/systemd/system/virtsecretd-admin.socket.d
             └─override.conf
     Active: active (listening) since Fri 2026-01-30 17:39:34 UTC; 2h 41min ago
      Until: Fri 2026-01-30 17:39:34 UTC; 2h 41min ago
   Triggers: ● virtsecretd.service
     Listen: /run/libvirt/virtsecretd-admin-sock (Stream)
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 4.0K (peak: 512.0K)
        CPU: 1ms
     CGroup: /system.slice/virtsecretd-admin.socket

Jan 30 17:39:34 compute-0 systemd[1]: Starting libvirt secret daemon admin socket...
Jan 30 17:39:34 compute-0 systemd[1]: Listening on libvirt secret daemon admin socket.

● virtsecretd-ro.socket - libvirt secret daemon read-only socket
     Loaded: loaded (/usr/lib/systemd/system/virtsecretd-ro.socket; enabled; preset: enabled)
    Drop-In: /etc/systemd/system/virtsecretd-ro.socket.d
             └─override.conf
     Active: active (listening) since Fri 2026-01-30 17:39:34 UTC; 2h 41min ago
      Until: Fri 2026-01-30 17:39:34 UTC; 2h 41min ago
   Triggers: ● virtsecretd.service
     Listen: /run/libvirt/virtsecretd-sock-ro (Stream)
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 0B (peak: 576.0K)
        CPU: 1ms
     CGroup: /system.slice/virtsecretd-ro.socket

Jan 30 17:39:34 compute-0 systemd[1]: Starting libvirt secret daemon read-only socket...
Jan 30 17:39:34 compute-0 systemd[1]: Listening on libvirt secret daemon read-only socket.

● virtsecretd.socket - libvirt secret daemon socket
     Loaded: loaded (/usr/lib/systemd/system/virtsecretd.socket; enabled; preset: enabled)
    Drop-In: /etc/systemd/system/virtsecretd.socket.d
             └─override.conf
     Active: active (listening) since Fri 2026-01-30 17:39:34 UTC; 2h 41min ago
      Until: Fri 2026-01-30 17:39:34 UTC; 2h 41min ago
   Triggers: ● virtsecretd.service
     Listen: /run/libvirt/virtsecretd-sock (Stream)
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 0B (peak: 528.0K)
        CPU: 3ms
     CGroup: /system.slice/virtsecretd.socket

Jan 30 17:39:34 compute-0 systemd[1]: Starting libvirt secret daemon socket...
Jan 30 17:39:34 compute-0 systemd[1]: Listening on libvirt secret daemon socket.

○ virtstoraged-admin.socket - libvirt storage daemon admin socket
     Loaded: loaded (/usr/lib/systemd/system/virtstoraged-admin.socket; enabled; preset: enabled)
     Active: inactive (dead)
   Triggers: ● virtstoraged.service
     Listen: /run/libvirt/virtstoraged-admin-sock (Stream)

○ virtstoraged-ro.socket - libvirt storage daemon read-only socket
     Loaded: loaded (/usr/lib/systemd/system/virtstoraged-ro.socket; enabled; preset: enabled)
     Active: inactive (dead)
   Triggers: ● virtstoraged.service
     Listen: /run/libvirt/virtstoraged-sock-ro (Stream)

○ virtstoraged.socket - libvirt storage daemon socket
     Loaded: loaded (/usr/lib/systemd/system/virtstoraged.socket; enabled; preset: enabled)
     Active: inactive (dead)
   Triggers: ● virtstoraged.service
     Listen: /run/libvirt/virtstoraged-sock (Stream)

● swap.swap - /swap
     Loaded: loaded (/etc/fstab; generated)
     Active: active since Fri 2026-01-30 17:26:29 UTC; 2h 54min ago
      Until: Fri 2026-01-30 17:26:29 UTC; 2h 54min ago
       What: /swap
       Docs: man:fstab(5)
             man:systemd-fstab-generator(8)

Unit ceph.target could not be found.

● basic.target - Basic System
     Loaded: loaded (/usr/lib/systemd/system/basic.target; static)
     Active: active since Fri 2026-01-30 16:49:48 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:48 UTC; 3h 31min ago
       Docs: man:systemd.special(7)

Jan 30 16:49:48 localhost systemd[1]: Reached target Basic System.

○ blockdev@dev-disk-by\x2duuid-822f14ea\x2d6e7e\x2d41df\x2db0d8\x2dfbe282d9ded8.target - Block Device Preparation for /dev/disk/by-uuid/822f14ea-6e7e-41df-b0d8-fbe282d9ded8
     Loaded: loaded (/usr/lib/systemd/system/blockdev@.target; static)
     Active: inactive (dead)
       Docs: man:systemd.special(7)

○ blockdev@dev-vda1.target - Block Device Preparation for /dev/vda1
     Loaded: loaded (/usr/lib/systemd/system/blockdev@.target; static)
     Active: inactive (dead)
       Docs: man:systemd.special(7)

● cloud-config.target - Cloud-config availability
     Loaded: loaded (/usr/lib/systemd/system/cloud-config.target; static)
     Active: active since Fri 2026-01-30 16:49:51 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:51 UTC; 3h 31min ago

Jan 30 16:49:51 np0005602930.novalocal systemd[1]: Reached target Cloud-config availability.

● cloud-init.target - Cloud-init target
     Loaded: loaded (/usr/lib/systemd/system/cloud-init.target; enabled-runtime; preset: disabled)
     Active: active since Fri 2026-01-30 16:49:51 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:51 UTC; 3h 31min ago

Jan 30 16:49:51 np0005602930.novalocal systemd[1]: Reached target Cloud-init target.

○ cryptsetup-pre.target - Local Encrypted Volumes (Pre)
     Loaded: loaded (/usr/lib/systemd/system/cryptsetup-pre.target; static)
     Active: inactive (dead)
       Docs: man:systemd.special(7)

● cryptsetup.target - Local Encrypted Volumes
     Loaded: loaded (/usr/lib/systemd/system/cryptsetup.target; static)
     Active: active since Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
       Docs: man:systemd.special(7)

● edpm_libvirt.target
     Loaded: loaded (/etc/systemd/system/edpm_libvirt.target; static)
     Active: active since Fri 2026-01-30 17:40:00 UTC; 2h 41min ago
      Until: Fri 2026-01-30 17:40:00 UTC; 2h 41min ago

Jan 30 17:40:00 compute-0 systemd[1]: Reached target edpm_libvirt.target.

○ emergency.target - Emergency Mode
     Loaded: loaded (/usr/lib/systemd/system/emergency.target; static)
     Active: inactive (dead)
       Docs: man:systemd.special(7)

○ first-boot-complete.target - First Boot Complete
     Loaded: loaded (/usr/lib/systemd/system/first-boot-complete.target; static)
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
       Docs: man:systemd.special(7)

Jan 30 16:49:47 localhost systemd[1]: First Boot Complete was skipped because of an unmet condition check (ConditionFirstBoot=yes).

○ getty-pre.target - Preparation for Logins
     Loaded: loaded (/usr/lib/systemd/system/getty-pre.target; static)
     Active: inactive (dead)
       Docs: man:systemd.special(7)
             man:systemd-getty-generator(8)
             http://0pointer.de/blog/projects/serial-console.html

● getty.target - Login Prompts
     Loaded: loaded (/usr/lib/systemd/system/getty.target; static)
     Active: active since Fri 2026-01-30 16:49:51 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:51 UTC; 3h 31min ago
       Docs: man:systemd.special(7)
             man:systemd-getty-generator(8)
             http://0pointer.de/blog/projects/serial-console.html

Jan 30 16:49:51 np0005602930.novalocal systemd[1]: Reached target Login Prompts.

○ graphical.target - Graphical Interface
     Loaded: loaded (/usr/lib/systemd/system/graphical.target; static)
     Active: inactive (dead)
       Docs: man:systemd.special(7)

○ initrd-fs.target - Initrd File Systems
     Loaded: loaded (/usr/lib/systemd/system/initrd-fs.target; static)
     Active: inactive (dead) since Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
       Docs: man:systemd.special(7)

Jan 30 16:49:44 localhost systemd[1]: Reached target Initrd File Systems.

○ initrd-root-device.target - Initrd Root Device
     Loaded: loaded (/usr/lib/systemd/system/initrd-root-device.target; static)
     Active: inactive (dead) since Fri 2026-01-30 16:49:44 UTC; 3h 31min ago
       Docs: man:systemd.special(7)

Jan 30 16:49:43 localhost systemd[1]: Reached target Initrd Root Device.
Jan 30 16:49:44 localhost systemd[1]: Stopped target Initrd Root Device.

○ initrd-root-fs.target - Initrd Root File System
     Loaded: loaded (/usr/lib/systemd/system/initrd-root-fs.target; static)
     Active: inactive (dead) since Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
       Docs: man:systemd.special(7)

Jan 30 16:49:44 localhost systemd[1]: Reached target Initrd Root File System.

○ initrd-switch-root.target - Switch Root
     Loaded: loaded (/usr/lib/systemd/system/initrd-switch-root.target; static)
     Active: inactive (dead) since Fri 2026-01-30 16:49:46 UTC; 3h 31min ago

Jan 30 16:49:45 localhost systemd[1]: Reached target Switch Root.

○ initrd-usr-fs.target - Initrd /usr File System
     Loaded: loaded (/usr/lib/systemd/system/initrd-usr-fs.target; static)
     Active: inactive (dead) since Fri 2026-01-30 16:49:44 UTC; 3h 31min ago
       Docs: man:systemd.special(7)

Jan 30 16:49:44 localhost systemd[1]: Stopped target Initrd /usr File System.
Notice: journal has been rotated since unit was started, output may be incomplete.

○ initrd.target - Initrd Default Target
     Loaded: loaded (/usr/lib/systemd/system/initrd.target; static)
     Active: inactive (dead) since Fri 2026-01-30 16:49:44 UTC; 3h 31min ago
       Docs: man:systemd.special(7)

Jan 30 16:49:44 localhost systemd[1]: Reached target Initrd Default Target.
Jan 30 16:49:44 localhost systemd[1]: Stopped target Initrd Default Target.

● integritysetup.target - Local Integrity Protected Volumes
     Loaded: loaded (/usr/lib/systemd/system/integritysetup.target; static)
     Active: active since Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
       Docs: man:systemd.special(7)

● local-fs-pre.target - Preparation for Local File Systems
     Loaded: loaded (/usr/lib/systemd/system/local-fs-pre.target; static)
     Active: active since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
       Docs: man:systemd.special(7)

Jan 30 16:49:47 localhost systemd[1]: Reached target Preparation for Local File Systems.

● local-fs.target - Local File Systems
     Loaded: loaded (/usr/lib/systemd/system/local-fs.target; static)
     Active: active since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
       Docs: man:systemd.special(7)

Jan 30 16:49:47 localhost systemd[1]: Reached target Local File Systems.

● multi-user.target - Multi-User System
     Loaded: loaded (/usr/lib/systemd/system/multi-user.target; indirect; preset: disabled)
     Active: active since Fri 2026-01-30 16:49:51 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:51 UTC; 3h 31min ago
       Docs: man:systemd.special(7)

Jan 30 16:49:51 np0005602930.novalocal systemd[1]: Reached target Multi-User System.

● network-online.target - Network is Online
     Loaded: loaded (/usr/lib/systemd/system/network-online.target; static)
     Active: active since Fri 2026-01-30 16:49:51 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:51 UTC; 3h 31min ago
       Docs: man:systemd.special(7)
             https://systemd.io/NETWORK_ONLINE

Jan 30 16:49:51 np0005602930.novalocal systemd[1]: Reached target Network is Online.

● network-pre.target - Preparation for Network
     Loaded: loaded (/usr/lib/systemd/system/network-pre.target; static)
     Active: active since Fri 2026-01-30 16:49:49 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:49 UTC; 3h 31min ago
       Docs: man:systemd.special(7)
             https://systemd.io/NETWORK_ONLINE

Jan 30 16:49:49 np0005602930.novalocal systemd[1]: Reached target Preparation for Network.

● network.target - Network
     Loaded: loaded (/usr/lib/systemd/system/network.target; static)
     Active: active since Fri 2026-01-30 16:49:49 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:49 UTC; 3h 31min ago
       Docs: man:systemd.special(7)
             https://systemd.io/NETWORK_ONLINE

Jan 30 16:49:49 np0005602930.novalocal systemd[1]: Reached target Network.

● nfs-client.target - NFS client services
     Loaded: loaded (/usr/lib/systemd/system/nfs-client.target; enabled; preset: disabled)
     Active: active since Fri 2026-01-30 16:49:49 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:49 UTC; 3h 31min ago

Jan 30 16:49:49 np0005602930.novalocal systemd[1]: Reached target NFS client services.

○ nss-lookup.target - Host and Network Name Lookups
     Loaded: loaded (/usr/lib/systemd/system/nss-lookup.target; static)
     Active: inactive (dead)
       Docs: man:systemd.special(7)

● nss-user-lookup.target - User and Group Name Lookups
     Loaded: loaded (/usr/lib/systemd/system/nss-user-lookup.target; static)
     Active: active since Fri 2026-01-30 16:49:48 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:48 UTC; 3h 31min ago
       Docs: man:systemd.special(7)

Jan 30 16:49:48 localhost systemd[1]: Reached target User and Group Name Lookups.

● paths.target - Path Units
     Loaded: loaded (/usr/lib/systemd/system/paths.target; static)
     Active: active since Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
       Docs: man:systemd.special(7)

○ remote-cryptsetup.target - Remote Encrypted Volumes
     Loaded: loaded (/usr/lib/systemd/system/remote-cryptsetup.target; disabled; preset: enabled)
     Active: inactive (dead) since Fri 2026-01-30 16:49:44 UTC; 3h 31min ago
       Docs: man:systemd.special(7)

Jan 30 16:49:44 localhost systemd[1]: Reached target Remote Encrypted Volumes.
Jan 30 16:49:44 localhost systemd[1]: Stopped target Remote Encrypted Volumes.

● remote-fs-pre.target - Preparation for Remote File Systems
     Loaded: loaded (/usr/lib/systemd/system/remote-fs-pre.target; static)
     Active: active since Fri 2026-01-30 16:49:49 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:49 UTC; 3h 31min ago
       Docs: man:systemd.special(7)

Jan 30 16:49:49 np0005602930.novalocal systemd[1]: Reached target Preparation for Remote File Systems.

● remote-fs.target - Remote File Systems
     Loaded: loaded (/usr/lib/systemd/system/remote-fs.target; enabled; preset: enabled)
     Active: active since Fri 2026-01-30 16:49:49 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:49 UTC; 3h 31min ago
       Docs: man:systemd.special(7)

Jan 30 16:49:49 np0005602930.novalocal systemd[1]: Reached target Remote File Systems.

○ remote-veritysetup.target - Remote Verity Protected Volumes
     Loaded: loaded (/usr/lib/systemd/system/remote-veritysetup.target; disabled; preset: disabled)
     Active: inactive (dead)
       Docs: man:systemd.special(7)

○ rescue.target - Rescue Mode
     Loaded: loaded (/usr/lib/systemd/system/rescue.target; static)
     Active: inactive (dead)
       Docs: man:systemd.special(7)

● rpc_pipefs.target
     Loaded: loaded (/usr/lib/systemd/system/rpc_pipefs.target; static)
     Active: active since Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:46 UTC; 3h 31min ago

● rpcbind.target - RPC Port Mapper
     Loaded: loaded (/usr/lib/systemd/system/rpcbind.target; static)
     Active: active since Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
       Docs: man:systemd.special(7)

○ shutdown.target - System Shutdown
     Loaded: loaded (/usr/lib/systemd/system/shutdown.target; static)
     Active: inactive (dead)
       Docs: man:systemd.special(7)

● slices.target - Slice Units
     Loaded: loaded (/usr/lib/systemd/system/slices.target; static)
     Active: active since Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
       Docs: man:systemd.special(7)

● sockets.target - Socket Units
Unit syslog.target could not be found.
     Loaded: loaded (/usr/lib/systemd/system/sockets.target; static)
     Active: active since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
       Docs: man:systemd.special(7)

Jan 30 16:49:47 localhost systemd[1]: Reached target Socket Units.

● sshd-keygen.target
     Loaded: loaded (/usr/lib/systemd/system/sshd-keygen.target; static)
     Active: active since Fri 2026-01-30 17:38:03 UTC; 2h 43min ago
      Until: Fri 2026-01-30 17:38:03 UTC; 2h 43min ago

Jan 30 17:38:03 compute-0 systemd[1]: Reached target sshd-keygen.target.

● swap.target - Swaps
     Loaded: loaded (/usr/lib/systemd/system/swap.target; static)
     Active: active since Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
       Docs: man:systemd.special(7)

● sysinit.target - System Initialization
     Loaded: loaded (/usr/lib/systemd/system/sysinit.target; static)
     Active: active since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
       Docs: man:systemd.special(7)

Jan 30 16:49:47 localhost systemd[1]: Reached target System Initialization.

○ time-set.target - System Time Set
     Loaded: loaded (/usr/lib/systemd/system/time-set.target; static)
     Active: inactive (dead)
       Docs: man:systemd.special(7)

○ time-sync.target - System Time Synchronized
     Loaded: loaded (/usr/lib/systemd/system/time-sync.target; static)
     Active: inactive (dead)
       Docs: man:systemd.special(7)

● timers.target - Timer Units
     Loaded: loaded (/usr/lib/systemd/system/timers.target; static)
     Active: active since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
       Docs: man:systemd.special(7)

Jan 30 16:49:47 localhost systemd[1]: Reached target Timer Units.

○ umount.target - Unmount All Filesystems
     Loaded: loaded (/usr/lib/systemd/system/umount.target; static)
     Active: inactive (dead)
       Docs: man:systemd.special(7)

○ veritysetup-pre.target - Local Verity Protected Volumes (Pre)
     Loaded: loaded (/usr/lib/systemd/system/veritysetup-pre.target; static)
     Active: inactive (dead)
       Docs: man:systemd.special(7)

● veritysetup.target - Local Verity Protected Volumes
     Loaded: loaded (/usr/lib/systemd/system/veritysetup.target; static)
     Active: active since Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:46 UTC; 3h 31min ago
       Docs: man:systemd.special(7)

○ virt-guest-shutdown.target - Libvirt guests shutdown
     Loaded: loaded (/etc/systemd/system/virt-guest-shutdown.target; static)
     Active: inactive (dead)
       Docs: https://libvirt.org

● 573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b-7930ac65c70ecc71.timer - /usr/bin/podman healthcheck run 573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b
     Loaded: loaded (/run/systemd/transient/573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b-7930ac65c70ecc71.timer; transient)
  Transient: yes
     Active: active (waiting) since Fri 2026-01-30 17:35:02 UTC; 2h 46min ago
      Until: Fri 2026-01-30 17:35:02 UTC; 2h 46min ago
    Trigger: Fri 2026-01-30 20:21:27 UTC; 6s left
   Triggers: ● 573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b-7930ac65c70ecc71.service

Jan 30 17:35:02 compute-0 systemd[1]: Started /usr/bin/podman healthcheck run 573b87d03f7662224a594c8f18bcaa5cf915954db225b1f6250163bd89fd9d7b.

● 60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5-47b3e5005a023219.timer - /usr/bin/podman healthcheck run 60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5
     Loaded: loaded (/run/systemd/transient/60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5-47b3e5005a023219.timer; transient)
  Transient: yes
     Active: active (waiting) since Fri 2026-01-30 17:43:22 UTC; 2h 37min ago
      Until: Fri 2026-01-30 17:43:22 UTC; 2h 37min ago
    Trigger: Fri 2026-01-30 20:21:45 UTC; 25s left
   Triggers: ● 60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5-47b3e5005a023219.service

Jan 30 17:43:22 compute-0 systemd[1]: Started /usr/bin/podman healthcheck run 60abe432233f84ee02b26fecc9e58214d9241d1664b4d303a62374f30bede4d5.

● 60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97-29274ca09f3589dd.timer - /usr/bin/podman healthcheck run 60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97
     Loaded: loaded (/run/systemd/transient/60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97-29274ca09f3589dd.timer; transient)
  Transient: yes
     Active: active (waiting) since Fri 2026-01-30 17:34:05 UTC; 2h 47min ago
      Until: Fri 2026-01-30 17:34:05 UTC; 2h 47min ago
    Trigger: Fri 2026-01-30 20:21:32 UTC; 12s left
   Triggers: ● 60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97-29274ca09f3589dd.service

Jan 30 17:34:05 compute-0 systemd[1]: Started /usr/bin/podman healthcheck run 60c72dccae9ec19de23e1b830e2dd35261a2d365f9fc9bab823fdf50de4afa97.

● 8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a-765fa5d442117b50.timer - /usr/bin/podman healthcheck run 8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a
     Loaded: loaded (/run/systemd/transient/8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a-765fa5d442117b50.timer; transient)
  Transient: yes
     Active: active (waiting) since Fri 2026-01-30 17:44:14 UTC; 2h 37min ago
      Until: Fri 2026-01-30 17:44:14 UTC; 2h 37min ago
    Trigger: Fri 2026-01-30 20:21:32 UTC; 12s left
   Triggers: ● 8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a-765fa5d442117b50.service

Jan 30 17:44:14 compute-0 systemd[1]: Started /usr/bin/podman healthcheck run 8d99429c23c2c912a8c9c41807874bb8391989546689fd530bb5f2e6b1f2126a.

● ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735-302036fd2481bd88.timer - /usr/bin/podman healthcheck run ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735
     Loaded: loaded (/run/systemd/transient/ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735-302036fd2481bd88.timer; transient)
  Transient: yes
     Active: active (waiting) since Fri 2026-01-30 17:43:54 UTC; 2h 37min ago
      Until: Fri 2026-01-30 17:43:54 UTC; 2h 37min ago
    Trigger: Fri 2026-01-30 20:21:45 UTC; 25s left
   Triggers: ● ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735-302036fd2481bd88.service

Jan 30 17:43:54 compute-0 systemd[1]: Started /usr/bin/podman healthcheck run ccf43e95299bd27efe596e42fc444f52d7c748608b415430b38d6055cf76f735.

● dnf-makecache.timer - dnf makecache --timer
     Loaded: loaded (/usr/lib/systemd/system/dnf-makecache.timer; enabled; preset: enabled)
     Active: active (waiting) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
    Trigger: Fri 2026-01-30 20:34:17 UTC; 12min left
   Triggers: ● dnf-makecache.service

Jan 30 16:49:47 localhost systemd[1]: Started dnf makecache --timer.

● e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0-5fafc72f56285aa6.timer - /usr/bin/podman healthcheck run e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0
     Loaded: loaded (/run/systemd/transient/e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0-5fafc72f56285aa6.timer; transient)
  Transient: yes
     Active: active (waiting) since Fri 2026-01-30 17:43:36 UTC; 2h 37min ago
      Until: Fri 2026-01-30 17:43:36 UTC; 2h 37min ago
    Trigger: Fri 2026-01-30 20:21:27 UTC; 6s left
   Triggers: ● e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0-5fafc72f56285aa6.service

Jan 30 17:43:36 compute-0 systemd[1]: Started /usr/bin/podman healthcheck run e532a475b9efcf6db3bcd0b699b3d9219ba6bdb4bb0d508f9b9ef4aaea0fb7a0.

● logrotate.timer - Daily rotation of log files
     Loaded: loaded (/usr/lib/systemd/system/logrotate.timer; enabled; preset: enabled)
     Active: active (waiting) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
    Trigger: Sat 2026-01-31 00:00:00 UTC; 3h 38min left
   Triggers: ● logrotate.service
       Docs: man:logrotate(8)
             man:logrotate.conf(5)

Jan 30 16:49:47 localhost systemd[1]: Started Daily rotation of log files.

○ raid-check.timer - Weekly RAID setup health check
     Loaded: loaded (/usr/lib/systemd/system/raid-check.timer; enabled; preset: enabled)
     Active: inactive (dead)
    Trigger: n/a
   Triggers: ● raid-check.service

○ sysstat-collect.timer - Run system activity accounting tool every 10 minutes
     Loaded: loaded (/usr/lib/systemd/system/sysstat-collect.timer; enabled; preset: disabled)
     Active: inactive (dead)
    Trigger: n/a
   Triggers: ● sysstat-collect.service

○ sysstat-summary.timer - Generate summary of yesterday's process accounting
     Loaded: loaded (/usr/lib/systemd/system/sysstat-summary.timer; enabled; preset: disabled)
     Active: inactive (dead)
    Trigger: n/a
   Triggers: ● sysstat-summary.service

● systemd-tmpfiles-clean.timer - Daily Cleanup of Temporary Directories
     Loaded: loaded (/usr/lib/systemd/system/systemd-tmpfiles-clean.timer; static)
     Active: active (waiting) since Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:49:47 UTC; 3h 31min ago
    Trigger: Sat 2026-01-31 17:05:10 UTC; 20h left
   Triggers: ● systemd-tmpfiles-clean.service
       Docs: man:tmpfiles.d(5)
             man:systemd-tmpfiles(8)

Jan 30 16:49:47 localhost systemd[1]: Started Daily Cleanup of Temporary Directories.

● unbound-anchor.timer - daily update of the root trust anchor for DNSSEC
     Loaded: loaded (/usr/lib/systemd/system/unbound-anchor.timer; enabled; preset: enabled)
     Active: active (waiting) since Fri 2026-01-30 17:29:03 UTC; 2h 52min ago
      Until: Fri 2026-01-30 17:29:03 UTC; 2h 52min ago
    Trigger: Sat 2026-01-31 00:00:00 UTC; 3h 38min left
   Triggers: ● unbound-anchor.service
       Docs: man:unbound-anchor(8)

Jan 30 17:29:03 compute-0 systemd[1]: Started daily update of the root trust anchor for DNSSEC.
