● compute-1
    State: running
    Units: 497 loaded (incl. loaded aliases)
     Jobs: 0 queued
   Failed: 0 units
    Since: Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
  systemd: 252-64.el9
   CGroup: /
           ├─271209 turbostat --debug sleep 10
           ├─271215 sleep 10
           ├─init.scope
           │ └─1 /usr/lib/systemd/systemd --switched-root --system --deserialize 31
           ├─machine.slice
           │ ├─libpod-407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a.scope
           │ │ └─container
           │ │   └─196153 /bin/node_exporter --web.config.file=/etc/node_exporter/node_exporter.yaml --web.disable-exporter-metrics --collector.systemd "--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service" --no-collector.dmi --no-collector.entropy --no-collector.thermal_zone --no-collector.time --no-collector.timex --no-collector.uname --no-collector.stat --no-collector.hwmon --no-collector.os --no-collector.selinux --no-collector.textfile --no-collector.powersupplyclass --no-collector.pressure --no-collector.rapl
           │ ├─libpod-455dca0f327e5b1da1cd1cb8c0cc19d961216e4814bf06622139b34f7df7859b.scope
           │ │ └─container
           │ │   ├─183473 dumb-init --single-child -- kolla_start
           │ │   ├─183475 /usr/bin/python3 /usr/bin/nova-compute
           │ │   ├─213354 /usr/bin/python3 /bin/privsep-helper --config-file /etc/nova/nova.conf --config-file /etc/nova/nova-compute.conf --config-dir /etc/nova/nova.conf.d --privsep_context nova.privsep.sys_admin_pctxt --privsep_sock_path /tmp/tmp33az_fjs/privsep.sock
           │ │   └─213418 /usr/bin/python3 /bin/privsep-helper --config-file /etc/nova/nova.conf --config-file /etc/nova/nova-compute.conf --config-dir /etc/nova/nova.conf.d --privsep_context vif_plug_ovs.privsep.vif_plug --privsep_sock_path /tmp/tmptmqu8bjz/privsep.sock
           │ ├─libpod-4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd.scope
           │ │ └─container
           │ │   ├─193155 dumb-init --single-child -- kolla_start
           │ │   ├─193158 "ceilometer-polling: master process [/usr/bin/ceilometer-polling --polling-namespaces compute --logfile /dev/stdout]"
           │ │   └─193336 "ceilometer-polling: AgentManager worker(0)"
           │ ├─libpod-834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20.scope
           │ │ └─container
           │ │   ├─105006 dumb-init --single-child -- kolla_start
           │ │   ├─105009 "neutron-ovn-metadata-agent (/usr/bin/python3 /usr/bin/neutron-ovn-metadata-agent)"
           │ │   ├─105396 "neutron-ovn-metadata-agent (/usr/bin/python3 /usr/bin/neutron-ovn-metadata-agent)"
           │ │   ├─105526 /usr/bin/python3 /bin/privsep-helper --config-file /etc/neutron/neutron.conf --config-dir /etc/neutron.conf.d --privsep_context neutron.privileged.namespace_cmd --privsep_sock_path /tmp/tmph8sg2l6q/privsep.sock
           │ │   ├─212908 /usr/bin/python3 /bin/privsep-helper --config-file /etc/neutron/neutron.conf --config-dir /etc/neutron.conf.d --privsep_context neutron.privileged.default --privsep_sock_path /tmp/tmpu4b226u9/privsep.sock
           │ │   └─213561 /usr/bin/python3 /bin/privsep-helper --config-file /etc/neutron/neutron.conf --config-dir /etc/neutron.conf.d --privsep_context neutron.privileged.link_cmd --privsep_sock_path /tmp/tmpg6ekdec0/privsep.sock
           │ ├─libpod-883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037.scope
           │ │ └─container
           │ │   ├─251554 dumb-init --single-child -- kolla_start
           │ │   └─251557 /usr/bin/ovn-controller --pidfile unix:/run/openvswitch/db.sock -p /etc/pki/tls/private/ovndb.key -c /etc/pki/tls/certs/ovndb.crt -C /etc/pki/tls/certs/ovndbca.crt
           │ ├─libpod-bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98.scope
           │ │ └─container
           │ │   └─199285 /bin/podman_exporter --web.config.file=/etc/podman_exporter/podman_exporter.yaml
           │ ├─libpod-cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18.scope
           │ │ └─container
           │ │   ├─216789 dumb-init --single-child -- /bin/bash -c "exec /usr/sbin/haproxy -Ws -f /var/lib/neutron/ovn-metadata-proxy/20be9363-8d31-4010-8379-2f4db75ec5ee.conf"
           │ │   ├─216791 /usr/sbin/haproxy -Ws -f /var/lib/neutron/ovn-metadata-proxy/20be9363-8d31-4010-8379-2f4db75ec5ee.conf
           │ │   └─216793 /usr/sbin/haproxy -Ws -f /var/lib/neutron/ovn-metadata-proxy/20be9363-8d31-4010-8379-2f4db75ec5ee.conf
           │ ├─libpod-conmon-cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18.scope
           │ │ └─216787 /usr/bin/conmon --api-version 1 -c cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18 -u cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18/userdata -p /run/containers/storage/overlay-containers/cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18/userdata/pidfile -n neutron-haproxy-ovnmeta-20be9363-8d31-4010-8379-2f4db75ec5ee --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18 --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18/userdata/oci-log --conmon-pidfile /run/containers/storage/overlay-containers/cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg 
overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18
           │ ├─libpod-conmon-ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab.scope
           │ │ └─216861 /usr/bin/conmon --api-version 1 -c ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab -u ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab/userdata -p /run/containers/storage/overlay-containers/ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab/userdata/pidfile -n neutron-haproxy-ovnmeta-311f9b45-49be-4345-9ac8-a1fe5a0b8a53 --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab/userdata/oci-log --conmon-pidfile /run/containers/storage/overlay-containers/ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg 
overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab
           │ ├─libpod-ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab.scope
           │ │ └─container
           │ │   ├─216863 dumb-init --single-child -- /bin/bash -c "exec /usr/sbin/haproxy -Ws -f /var/lib/neutron/ovn-metadata-proxy/311f9b45-49be-4345-9ac8-a1fe5a0b8a53.conf"
           │ │   ├─216866 /usr/sbin/haproxy -Ws -f /var/lib/neutron/ovn-metadata-proxy/311f9b45-49be-4345-9ac8-a1fe5a0b8a53.conf
           │ │   └─216868 /usr/sbin/haproxy -Ws -f /var/lib/neutron/ovn-metadata-proxy/311f9b45-49be-4345-9ac8-a1fe5a0b8a53.conf
           │ ├─libpod-e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a.scope
           │ │ └─container
           │ │   └─202410 /app/openstack-network-exporter
           │ ├─machine-qemu\x2d7\x2dinstance\x2d0000000e.scope
           │ │ └─libvirt
           │ │   └─216693 /usr/libexec/qemu-kvm -name guest=instance-0000000e,debug-threads=on -S -object "{\"qom-type\":\"secret\",\"id\":\"masterKey0\",\"format\":\"raw\",\"file\":\"/var/lib/libvirt/qemu/domain-7-instance-0000000e/master-key.aes\"}" -machine pc-q35-rhel9.8.0,usb=off,dump-guest-core=off,memory-backend=pc.ram,hpet=off,acpi=on -accel kvm -cpu EPYC-Rome,x2apic=on,tsc-deadline=on,hypervisor=on,tsc-adjust=on,spec-ctrl=on,stibp=on,ssbd=on,cmp-legacy=on,overflow-recov=on,succor=on,ibrs=on,amd-ssbd=on,virt-ssbd=on,lbrv=on,tsc-scale=on,vmcb-clean=on,flushbyasid=on,pause-filter=on,pfthreshold=on,svme-addr-chk=on,lfence-always-serializing=on,xsaves=off -m size=131072k -object "{\"qom-type\":\"memory-backend-ram\",\"id\":\"pc.ram\",\"size\":134217728}" -overcommit mem-lock=off -smp 1,sockets=1,dies=1,clusters=1,cores=1,threads=1 -uuid 79f0f99a-6f26-49ed-ac15-d149313db321 -smbios "type=1,manufacturer=RDO,product=OpenStack Compute,version=27.5.2-0.20260127144738.eaa65f0.el9,serial=79f0f99a-6f26-49ed-ac15-d149313db321,uuid=79f0f99a-6f26-49ed-ac15-d149313db321,family=Virtual Machine" -no-user-config -nodefaults -chardev socket,id=charmonitor,fd=28,server=on,wait=off -mon chardev=charmonitor,id=monitor,mode=control -rtc base=utc,driftfix=slew -global kvm-pit.lost_tick_policy=delay -no-shutdown -boot strict=on -device "{\"driver\":\"pcie-root-port\",\"port\":16,\"chassis\":1,\"id\":\"pci.1\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":17,\"chassis\":2,\"id\":\"pci.2\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":18,\"chassis\":3,\"id\":\"pci.3\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":19,\"chassis\":4,\"id\":\"pci.4\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":20,\"chassis\":5,\"id\":\"pci.5\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x4\"}" -device 
"{\"driver\":\"pcie-root-port\",\"port\":21,\"chassis\":6,\"id\":\"pci.6\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":22,\"chassis\":7,\"id\":\"pci.7\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":23,\"chassis\":8,\"id\":\"pci.8\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":24,\"chassis\":9,\"id\":\"pci.9\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":25,\"chassis\":10,\"id\":\"pci.10\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":26,\"chassis\":11,\"id\":\"pci.11\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":27,\"chassis\":12,\"id\":\"pci.12\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":28,\"chassis\":13,\"id\":\"pci.13\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":29,\"chassis\":14,\"id\":\"pci.14\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":30,\"chassis\":15,\"id\":\"pci.15\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":31,\"chassis\":16,\"id\":\"pci.16\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":32,\"chassis\":17,\"id\":\"pci.17\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":33,\"chassis\":18,\"id\":\"pci.18\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":34,\"chassis\":19,\"id\":\"pci.19\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":35,\"chassis\":20,\"id\":\"pci.20\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x3\"}" -device 
"{\"driver\":\"pcie-root-port\",\"port\":36,\"chassis\":21,\"id\":\"pci.21\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":37,\"chassis\":22,\"id\":\"pci.22\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":38,\"chassis\":23,\"id\":\"pci.23\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":39,\"chassis\":24,\"id\":\"pci.24\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":40,\"chassis\":25,\"id\":\"pci.25\",\"bus\":\"pcie.0\",\"addr\":\"0x5\"}" -device "{\"driver\":\"pcie-pci-bridge\",\"id\":\"pci.26\",\"bus\":\"pci.1\",\"addr\":\"0x0\"}" -device "{\"driver\":\"piix3-usb-uhci\",\"id\":\"usb\",\"bus\":\"pci.26\",\"addr\":\"0x1\"}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/_base/d8b1339cb6a58d9c630b446503c76b090a4f368e\",\"node-name\":\"libvirt-3-storage\",\"read-only\":true,\"cache\":{\"direct\":true,\"no-flush\":false}}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/79f0f99a-6f26-49ed-ac15-d149313db321/disk\",\"node-name\":\"libvirt-2-storage\",\"auto-read-only\":true,\"discard\":\"unmap\",\"cache\":{\"direct\":true,\"no-flush\":false}}" -blockdev "{\"node-name\":\"libvirt-2-format\",\"read-only\":false,\"cache\":{\"direct\":true,\"no-flush\":false},\"driver\":\"qcow2\",\"file\":\"libvirt-2-storage\",\"backing\":\"libvirt-3-storage\"}" -device "{\"driver\":\"virtio-blk-pci\",\"bus\":\"pci.4\",\"addr\":\"0x0\",\"drive\":\"libvirt-2-format\",\"id\":\"virtio-disk0\",\"bootindex\":1,\"write-cache\":\"on\"}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/79f0f99a-6f26-49ed-ac15-d149313db321/disk.config\",\"node-name\":\"libvirt-1-storage\",\"read-only\":true,\"cache\":{\"direct\":true,\"no-flush\":false}}" -device 
"{\"driver\":\"ide-cd\",\"bus\":\"ide.0\",\"drive\":\"libvirt-1-storage\",\"id\":\"sata0-0-0\",\"write-cache\":\"on\"}" -netdev "{\"type\":\"tap\",\"fd\":\"32\",\"vhost\":true,\"vhostfd\":\"37\",\"id\":\"hostnet0\"}" -device "{\"driver\":\"virtio-net-pci\",\"rx_queue_size\":512,\"host_mtu\":1342,\"netdev\":\"hostnet0\",\"id\":\"net0\",\"mac\":\"fa:16:3e:28:e9:1d\",\"bus\":\"pci.2\",\"addr\":\"0x0\"}" -netdev "{\"type\":\"tap\",\"fd\":\"38\",\"vhost\":true,\"vhostfd\":\"39\",\"id\":\"hostnet1\"}" -device "{\"driver\":\"virtio-net-pci\",\"rx_queue_size\":512,\"host_mtu\":1342,\"netdev\":\"hostnet1\",\"id\":\"net1\",\"mac\":\"fa:16:3e:0b:98:69\",\"bus\":\"pci.3\",\"addr\":\"0x0\"}" -add-fd set=0,fd=30,opaque=serial0-log -chardev pty,id=charserial0,logfile=/dev/fdset/0,logappend=on -device "{\"driver\":\"isa-serial\",\"chardev\":\"charserial0\",\"id\":\"serial0\",\"index\":0}" -device "{\"driver\":\"usb-tablet\",\"id\":\"input0\",\"bus\":\"usb.0\",\"port\":\"1\"}" -audiodev "{\"id\":\"audio1\",\"driver\":\"none\"}" -object "{\"qom-type\":\"tls-creds-x509\",\"id\":\"vnc-tls-creds0\",\"dir\":\"/etc/pki/qemu\",\"endpoint\":\"server\",\"verify-peer\":true}" -vnc "[::0]:0,tls-creds=vnc-tls-creds0,audiodev=audio1" -device "{\"driver\":\"virtio-vga\",\"id\":\"video0\",\"max_outputs\":1,\"bus\":\"pcie.0\",\"addr\":\"0x1\"}" -global ICH9-LPC.noreboot=off -watchdog-action reset -device "{\"driver\":\"virtio-balloon-pci\",\"id\":\"balloon0\",\"bus\":\"pci.5\",\"addr\":\"0x0\"}" -object "{\"qom-type\":\"rng-random\",\"id\":\"objrng0\",\"filename\":\"/dev/urandom\"}" -device "{\"driver\":\"virtio-rng-pci\",\"rng\":\"objrng0\",\"id\":\"rng0\",\"bus\":\"pci.6\",\"addr\":\"0x0\"}" -device "{\"driver\":\"vmcoreinfo\"}" -sandbox on,obsolete=deny,elevateprivileges=deny,spawn=deny,resourcecontrol=deny -msg timestamp=on
           │ └─machine-qemu\x2d8\x2dinstance\x2d00000010.scope
           │   └─libvirt
           │     └─217311 /usr/libexec/qemu-kvm -name guest=instance-00000010,debug-threads=on -S -object "{\"qom-type\":\"secret\",\"id\":\"masterKey0\",\"format\":\"raw\",\"file\":\"/var/lib/libvirt/qemu/domain-8-instance-00000010/master-key.aes\"}" -machine pc-q35-rhel9.8.0,usb=off,dump-guest-core=off,memory-backend=pc.ram,hpet=off,acpi=on -accel kvm -cpu EPYC-Rome,x2apic=on,tsc-deadline=on,hypervisor=on,tsc-adjust=on,spec-ctrl=on,stibp=on,ssbd=on,cmp-legacy=on,overflow-recov=on,succor=on,ibrs=on,amd-ssbd=on,virt-ssbd=on,lbrv=on,tsc-scale=on,vmcb-clean=on,flushbyasid=on,pause-filter=on,pfthreshold=on,svme-addr-chk=on,lfence-always-serializing=on,xsaves=off -m size=131072k -object "{\"qom-type\":\"memory-backend-ram\",\"id\":\"pc.ram\",\"size\":134217728}" -overcommit mem-lock=off -smp 1,sockets=1,dies=1,clusters=1,cores=1,threads=1 -uuid bdd94295-7a8b-44b3-91c8-846d36f784a0 -smbios "type=1,manufacturer=RDO,product=OpenStack Compute,version=27.5.2-0.20260127144738.eaa65f0.el9,serial=bdd94295-7a8b-44b3-91c8-846d36f784a0,uuid=bdd94295-7a8b-44b3-91c8-846d36f784a0,family=Virtual Machine" -no-user-config -nodefaults -chardev socket,id=charmonitor,fd=34,server=on,wait=off -mon chardev=charmonitor,id=monitor,mode=control -rtc base=utc,driftfix=slew -global kvm-pit.lost_tick_policy=delay -no-shutdown -boot strict=on -device "{\"driver\":\"pcie-root-port\",\"port\":16,\"chassis\":1,\"id\":\"pci.1\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":17,\"chassis\":2,\"id\":\"pci.2\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":18,\"chassis\":3,\"id\":\"pci.3\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":19,\"chassis\":4,\"id\":\"pci.4\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":20,\"chassis\":5,\"id\":\"pci.5\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x4\"}" -device 
"{\"driver\":\"pcie-root-port\",\"port\":21,\"chassis\":6,\"id\":\"pci.6\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":22,\"chassis\":7,\"id\":\"pci.7\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":23,\"chassis\":8,\"id\":\"pci.8\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":24,\"chassis\":9,\"id\":\"pci.9\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":25,\"chassis\":10,\"id\":\"pci.10\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":26,\"chassis\":11,\"id\":\"pci.11\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":27,\"chassis\":12,\"id\":\"pci.12\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":28,\"chassis\":13,\"id\":\"pci.13\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":29,\"chassis\":14,\"id\":\"pci.14\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":30,\"chassis\":15,\"id\":\"pci.15\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":31,\"chassis\":16,\"id\":\"pci.16\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":32,\"chassis\":17,\"id\":\"pci.17\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":33,\"chassis\":18,\"id\":\"pci.18\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":34,\"chassis\":19,\"id\":\"pci.19\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":35,\"chassis\":20,\"id\":\"pci.20\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x3\"}" -device 
"{\"driver\":\"pcie-root-port\",\"port\":36,\"chassis\":21,\"id\":\"pci.21\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":37,\"chassis\":22,\"id\":\"pci.22\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":38,\"chassis\":23,\"id\":\"pci.23\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":39,\"chassis\":24,\"id\":\"pci.24\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":40,\"chassis\":25,\"id\":\"pci.25\",\"bus\":\"pcie.0\",\"addr\":\"0x5\"}" -device "{\"driver\":\"pcie-pci-bridge\",\"id\":\"pci.26\",\"bus\":\"pci.1\",\"addr\":\"0x0\"}" -device "{\"driver\":\"piix3-usb-uhci\",\"id\":\"usb\",\"bus\":\"pci.26\",\"addr\":\"0x1\"}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/_base/d8b1339cb6a58d9c630b446503c76b090a4f368e\",\"node-name\":\"libvirt-3-storage\",\"read-only\":true,\"cache\":{\"direct\":true,\"no-flush\":false}}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/bdd94295-7a8b-44b3-91c8-846d36f784a0/disk\",\"node-name\":\"libvirt-2-storage\",\"auto-read-only\":true,\"discard\":\"unmap\",\"cache\":{\"direct\":true,\"no-flush\":false}}" -blockdev "{\"node-name\":\"libvirt-2-format\",\"read-only\":false,\"cache\":{\"direct\":true,\"no-flush\":false},\"driver\":\"qcow2\",\"file\":\"libvirt-2-storage\",\"backing\":\"libvirt-3-storage\"}" -device "{\"driver\":\"virtio-blk-pci\",\"bus\":\"pci.4\",\"addr\":\"0x0\",\"drive\":\"libvirt-2-format\",\"id\":\"virtio-disk0\",\"bootindex\":1,\"write-cache\":\"on\"}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/bdd94295-7a8b-44b3-91c8-846d36f784a0/disk.config\",\"node-name\":\"libvirt-1-storage\",\"read-only\":true,\"cache\":{\"direct\":true,\"no-flush\":false}}" -device 
"{\"driver\":\"ide-cd\",\"bus\":\"ide.0\",\"drive\":\"libvirt-1-storage\",\"id\":\"sata0-0-0\",\"write-cache\":\"on\"}" -netdev "{\"type\":\"tap\",\"fd\":\"39\",\"vhost\":true,\"vhostfd\":\"41\",\"id\":\"hostnet0\"}" -device "{\"driver\":\"virtio-net-pci\",\"rx_queue_size\":512,\"host_mtu\":1342,\"netdev\":\"hostnet0\",\"id\":\"net0\",\"mac\":\"fa:16:3e:16:12:2b\",\"bus\":\"pci.2\",\"addr\":\"0x0\"}" -netdev "{\"type\":\"tap\",\"fd\":\"42\",\"vhost\":true,\"vhostfd\":\"47\",\"id\":\"hostnet1\"}" -device "{\"driver\":\"virtio-net-pci\",\"rx_queue_size\":512,\"host_mtu\":1342,\"netdev\":\"hostnet1\",\"id\":\"net1\",\"mac\":\"fa:16:3e:9a:66:89\",\"bus\":\"pci.3\",\"addr\":\"0x0\"}" -add-fd set=0,fd=38,opaque=serial0-log -chardev pty,id=charserial0,logfile=/dev/fdset/0,logappend=on -device "{\"driver\":\"isa-serial\",\"chardev\":\"charserial0\",\"id\":\"serial0\",\"index\":0}" -device "{\"driver\":\"usb-tablet\",\"id\":\"input0\",\"bus\":\"usb.0\",\"port\":\"1\"}" -audiodev "{\"id\":\"audio1\",\"driver\":\"none\"}" -object "{\"qom-type\":\"tls-creds-x509\",\"id\":\"vnc-tls-creds0\",\"dir\":\"/etc/pki/qemu\",\"endpoint\":\"server\",\"verify-peer\":true}" -vnc "[::0]:1,tls-creds=vnc-tls-creds0,audiodev=audio1" -device "{\"driver\":\"virtio-vga\",\"id\":\"video0\",\"max_outputs\":1,\"bus\":\"pcie.0\",\"addr\":\"0x1\"}" -global ICH9-LPC.noreboot=off -watchdog-action reset -device "{\"driver\":\"virtio-balloon-pci\",\"id\":\"balloon0\",\"bus\":\"pci.5\",\"addr\":\"0x0\"}" -object "{\"qom-type\":\"rng-random\",\"id\":\"objrng0\",\"filename\":\"/dev/urandom\"}" -device "{\"driver\":\"virtio-rng-pci\",\"rng\":\"objrng0\",\"id\":\"rng0\",\"bus\":\"pci.6\",\"addr\":\"0x0\"}" -device "{\"driver\":\"vmcoreinfo\"}" -sandbox on,obsolete=deny,elevateprivileges=deny,spawn=deny,resourcecontrol=deny -msg timestamp=on
           ├─system.slice
           │ ├─NetworkManager.service
           │ │ └─55963 /usr/sbin/NetworkManager --no-daemon
           │ ├─auditd.service
           │ │ ├─704 /sbin/auditd
           │ │ └─706 /usr/sbin/sedispatch
           │ ├─chronyd.service
           │ │ └─65304 /usr/sbin/chronyd -F 2
           │ ├─crond.service
           │ │ └─1010 /usr/sbin/crond -n
           │ ├─dbus-broker.service
           │ │ ├─765 /usr/bin/dbus-broker-launch --scope system --audit
           │ │ └─776 dbus-broker --log 4 --controller 9 --machine-id bf0bc0bb03de29b24cba1cc9599cf5d0 --max-bytes 536870912 --max-fds 4096 --max-matches 131072 --audit
           │ ├─edpm_ceilometer_agent_compute.service
           │ │ └─193153 /usr/bin/conmon --api-version 1 -c 4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd -u 4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd/userdata -p /run/containers/storage/overlay-containers/4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd/userdata/pidfile -n ceilometer_agent_compute --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd/userdata/oci-log --conmon-pidfile /run/ceilometer_agent_compute.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup 
--exit-command-arg --stopped-only --exit-command-arg 4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd
           │ ├─edpm_node_exporter.service
           │ │ └─196151 /usr/bin/conmon --api-version 1 -c 407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a -u 407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a/userdata -p /run/containers/storage/overlay-containers/407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a/userdata/pidfile -n node_exporter --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a/userdata/oci-log --conmon-pidfile /run/node_exporter.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 
--stopped-only --exit-command-arg 407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a
           │ ├─edpm_nova_compute.service
           │ │ └─183471 /usr/bin/conmon --api-version 1 -c 455dca0f327e5b1da1cd1cb8c0cc19d961216e4814bf06622139b34f7df7859b -u 455dca0f327e5b1da1cd1cb8c0cc19d961216e4814bf06622139b34f7df7859b -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/455dca0f327e5b1da1cd1cb8c0cc19d961216e4814bf06622139b34f7df7859b/userdata -p /run/containers/storage/overlay-containers/455dca0f327e5b1da1cd1cb8c0cc19d961216e4814bf06622139b34f7df7859b/userdata/pidfile -n nova_compute --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/455dca0f327e5b1da1cd1cb8c0cc19d961216e4814bf06622139b34f7df7859b --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/455dca0f327e5b1da1cd1cb8c0cc19d961216e4814bf06622139b34f7df7859b/userdata/oci-log --conmon-pidfile /run/nova_compute.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 
--stopped-only --exit-command-arg 455dca0f327e5b1da1cd1cb8c0cc19d961216e4814bf06622139b34f7df7859b
           │ ├─edpm_openstack_network_exporter.service
           │ │ └─202408 /usr/bin/conmon --api-version 1 -c e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a -u e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a/userdata -p /run/containers/storage/overlay-containers/e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a/userdata/pidfile -n openstack_network_exporter --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a/userdata/oci-log --conmon-pidfile /run/openstack_network_exporter.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup 
--exit-command-arg --stopped-only --exit-command-arg e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a
           │ ├─edpm_ovn_controller.service
           │ │ └─251552 /usr/bin/conmon --api-version 1 -c 883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037 -u 883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037/userdata -p /run/containers/storage/overlay-containers/883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037/userdata/pidfile -n ovn_controller --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037 --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037/userdata/oci-log --conmon-pidfile /run/ovn_controller.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 
--stopped-only --exit-command-arg 883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037
           │ ├─edpm_ovn_metadata_agent.service
           │ │ └─105004 /usr/bin/conmon --api-version 1 -c 834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20 -u 834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20/userdata -p /run/containers/storage/overlay-containers/834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20/userdata/pidfile -n ovn_metadata_agent --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20 --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20/userdata/oci-log --conmon-pidfile /run/ovn_metadata_agent.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup 
--exit-command-arg --stopped-only --exit-command-arg 834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20
           │ ├─edpm_podman_exporter.service
           │ │ └─199283 /usr/bin/conmon --api-version 1 -c bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98 -u bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98/userdata -p /run/containers/storage/overlay-containers/bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98/userdata/pidfile -n podman_exporter --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98 --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98/userdata/oci-log --conmon-pidfile /run/podman_exporter.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 
--stopped-only --exit-command-arg bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98
           │ ├─gssproxy.service
           │ │ └─879 /usr/sbin/gssproxy -D
           │ ├─irqbalance.service
           │ │ └─790 /usr/sbin/irqbalance
           │ ├─iscsid.service
           │ │ └─169133 /usr/sbin/iscsid -f
           │ ├─multipathd.service
           │ │ └─169292 /sbin/multipathd -d -s
           │ ├─ovs-vswitchd.service
           │ │ └─54264 ovs-vswitchd unix:/var/run/openvswitch/db.sock -vconsole:emer -vsyslog:err -vfile:info --mlockall --user openvswitch:hugetlbfs --no-chdir --log-file=/var/log/openvswitch/ovs-vswitchd.log --pidfile=/var/run/openvswitch/ovs-vswitchd.pid --detach
           │ ├─ovsdb-server.service
           │ │ └─54182 ovsdb-server /etc/openvswitch/conf.db -vconsole:emer -vsyslog:err -vfile:info --remote=punix:/var/run/openvswitch/db.sock --private-key=db:Open_vSwitch,SSL,private_key --certificate=db:Open_vSwitch,SSL,certificate --bootstrap-ca-cert=db:Open_vSwitch,SSL,ca_cert --user openvswitch:hugetlbfs --no-chdir --log-file=/var/log/openvswitch/ovsdb-server.log --pidfile=/var/run/openvswitch/ovsdb-server.pid --detach
           │ ├─podman.service
           │ │ └─199294 /usr/bin/podman --log-level=info system service
           │ ├─polkit.service
           │ │ └─44106 /usr/lib/polkit-1/polkitd --no-debug
           │ ├─rpcbind.service
           │ │ └─702 /usr/bin/rpcbind -w -f
           │ ├─rsyslog.service
           │ │ └─1006 /usr/sbin/rsyslogd -n
           │ ├─sshd.service
           │ │ └─129855 "sshd: /usr/sbin/sshd -D [listener] 0 of 10-100 startups"
           │ ├─system-getty.slice
           │ │ └─getty@tty1.service
           │ │   └─1011 /sbin/agetty -o "-p -- \\u" --noclear - linux
           │ ├─system-serial\x2dgetty.slice
           │ │ └─serial-getty@ttyS0.service
           │ │   └─1012 /sbin/agetty -o "-p -- \\u" --keep-baud 115200,57600,38400,9600 - vt220
           │ ├─systemd-hostnamed.service
           │ │ └─268676 /usr/lib/systemd/systemd-hostnamed
           │ ├─systemd-journald.service
           │ │ └─679 /usr/lib/systemd/systemd-journald
           │ ├─systemd-logind.service
           │ │ └─808 /usr/lib/systemd/systemd-logind
           │ ├─systemd-machined.service
           │ │ └─154781 /usr/lib/systemd/systemd-machined
           │ ├─systemd-udevd.service
           │ │ └─udev
           │ │   └─732 /usr/lib/systemd/systemd-udevd
           │ ├─tuned.service
           │ │ └─44283 /usr/bin/python3 -Es /usr/sbin/tuned -l -P
           │ ├─virtlogd.service
           │ │ └─154150 /usr/sbin/virtlogd
           │ ├─virtnodedevd.service
           │ │ └─183389 /usr/sbin/virtnodedevd --timeout 120
           │ └─virtqemud.service
           │   └─183098 /usr/sbin/virtqemud --timeout 120
           └─user.slice
             └─user-1000.slice
               ├─session-1.scope
               │ └─4523 /usr/bin/python3
               ├─session-100.scope
               │ ├─247036 "sshd-session: zuul [priv]"
               │ └─247039 "sshd-session: zuul@notty"
               ├─session-102.scope
               │ ├─247152 "sshd-session: zuul [priv]"
               │ └─247155 "sshd-session: zuul@notty"
               ├─session-105.scope
               │ ├─247407 "sshd-session: zuul [priv]"
               │ └─247410 "sshd-session: zuul@notty"
               ├─session-107.scope
               │ ├─247511 "sshd-session: zuul [priv]"
               │ └─247514 "sshd-session: zuul@notty"
               ├─session-108.scope
               │ ├─247824 "sshd-session: zuul [priv]"
               │ └─247827 "sshd-session: zuul@notty"
               ├─session-110.scope
               │ ├─247944 "sshd-session: zuul [priv]"
               │ └─247977 "sshd-session: zuul@notty"
               ├─session-111.scope
               │ ├─248004 "sshd-session: zuul [priv]"
               │ └─248007 "sshd-session: zuul@notty"
               ├─session-113.scope
               │ ├─248105 "sshd-session: zuul [priv]"
               │ └─248108 "sshd-session: zuul@notty"
               ├─session-120.scope
               │ ├─248982 "sshd-session: zuul [priv]"
               │ └─248985 "sshd-session: zuul@notty"
               ├─session-122.scope
               │ ├─249152 "sshd-session: zuul [priv]"
               │ └─249196 "sshd-session: zuul@notty"
               ├─session-124.scope
               │ ├─249529 "sshd-session: zuul [priv]"
               │ └─249532 "sshd-session: zuul@notty"
               ├─session-126.scope
               │ ├─249678 "sshd-session: zuul [priv]"
               │ └─249681 "sshd-session: zuul@notty"
               ├─session-128.scope
               │ ├─249968 "sshd-session: zuul [priv]"
               │ └─249971 "sshd-session: zuul@notty"
               ├─session-130.scope
               │ ├─250070 "sshd-session: zuul [priv]"
               │ └─250073 "sshd-session: zuul@notty"
               ├─session-131.scope
               │ ├─250101 "sshd-session: zuul [priv]"
               │ └─250104 "sshd-session: zuul@notty"
               ├─session-133.scope
               │ ├─250202 "sshd-session: zuul [priv]"
               │ └─250205 "sshd-session: zuul@notty"
               ├─session-134.scope
               │ ├─250290 "sshd-session: zuul [priv]"
               │ └─250312 "sshd-session: zuul@notty"
               ├─session-136.scope
               │ ├─250434 "sshd-session: zuul [priv]"
               │ └─250449 "sshd-session: zuul@notty"
               ├─session-162.scope
               │ ├─267972 "sshd-session: zuul [priv]"
               │ ├─267975 "sshd-session: zuul@notty"
               │ ├─267976 sudo bash -c "rm -rf /var/tmp/sos-osp && mkdir /var/tmp/sos-osp && sos report --batch --all-logs --tmp-dir=/var/tmp/sos-osp  -p container,openstack_edpm,system,storage,virt"
               │ ├─268000 /usr/bin/python3 -s /sbin/sos report --batch --all-logs --tmp-dir=/var/tmp/sos-osp -p container,openstack_edpm,system,storage,virt
               │ ├─271208 timeout 15s turbostat --debug sleep 10
               │ ├─272225 timeout 300s systemctl status --all
               │ ├─272226 systemctl status --all
               │ ├─272228 timeout --foreground 300s virsh -r nodedev-dumpxml computer
               │ └─272229 /usr/bin/python3 -s /sbin/sos report --batch --all-logs --tmp-dir=/var/tmp/sos-osp -p container,openstack_edpm,system,storage,virt
               ├─session-48.scope
               │ ├─242639 "sshd-session: zuul [priv]"
               │ └─242642 "sshd-session: zuul@notty"
               ├─session-50.scope
               │ ├─242795 "sshd-session: zuul [priv]"
               │ └─242798 "sshd-session: zuul@notty"
               ├─session-51.scope
               │ ├─242870 "sshd-session: zuul [priv]"
               │ └─242873 "sshd-session: zuul@notty"
               ├─session-53.scope
               │ ├─242971 "sshd-session: zuul [priv]"
               │ └─242974 "sshd-session: zuul@notty"
               ├─session-54.scope
               │ ├─243049 "sshd-session: zuul [priv]"
               │ └─243052 "sshd-session: zuul@notty"
               ├─session-56.scope
               │ ├─243121 "sshd-session: zuul [priv]"
               │ └─243131 "sshd-session: zuul@notty"
               ├─session-57.scope
               │ ├─243214 "sshd-session: zuul [priv]"
               │ └─243217 "sshd-session: zuul@notty"
               ├─session-59.scope
               │ ├─243317 "sshd-session: zuul [priv]"
               │ └─243320 "sshd-session: zuul@notty"
               ├─session-60.scope
               │ ├─243597 "sshd-session: zuul [priv]"
               │ └─243600 "sshd-session: zuul@notty"
               ├─session-62.scope
               │ ├─243757 "sshd-session: zuul [priv]"
               │ └─243760 "sshd-session: zuul@notty"
               ├─session-68.scope
               │ ├─244541 "sshd-session: zuul [priv]"
               │ └─244560 "sshd-session: zuul@notty"
               ├─session-70.scope
               │ ├─244705 "sshd-session: zuul [priv]"
               │ └─244708 "sshd-session: zuul@notty"
               ├─session-71.scope
               │ ├─244735 "sshd-session: zuul [priv]"
               │ └─244738 "sshd-session: zuul@notty"
               ├─session-73.scope
               │ ├─244839 "sshd-session: zuul [priv]"
               │ └─244842 "sshd-session: zuul@notty"
               ├─session-74.scope
               │ ├─244869 "sshd-session: zuul [priv]"
               │ └─244872 "sshd-session: zuul@notty"
               ├─session-76.scope
               │ ├─244930 "sshd-session: zuul [priv]"
               │ └─244933 "sshd-session: zuul@notty"
               ├─session-79.scope
               │ ├─245397 "sshd-session: zuul [priv]"
               │ └─245400 "sshd-session: zuul@notty"
               ├─session-81.scope
               │ ├─245500 "sshd-session: zuul [priv]"
               │ └─245503 "sshd-session: zuul@notty"
               ├─session-82.scope
               │ ├─245530 "sshd-session: zuul [priv]"
               │ └─245533 "sshd-session: zuul@notty"
               ├─session-84.scope
               │ ├─245589 "sshd-session: zuul [priv]"
               │ └─245592 "sshd-session: zuul@notty"
               ├─session-85.scope
               │ ├─245619 "sshd-session: zuul [priv]"
               │ └─245634 "sshd-session: zuul@notty"
               ├─session-87.scope
               │ ├─245770 "sshd-session: zuul [priv]"
               │ └─245773 "sshd-session: zuul@notty"
               ├─session-94.scope
               │ ├─246551 "sshd-session: zuul [priv]"
               │ └─246554 "sshd-session: zuul@notty"
               ├─session-96.scope
               │ ├─246719 "sshd-session: zuul [priv]"
               │ └─246722 "sshd-session: zuul@notty"
               ├─session-97.scope
               │ ├─246796 "sshd-session: zuul [priv]"
               │ └─246813 "sshd-session: zuul@notty"
               ├─session-99.scope
               │ ├─247006 "sshd-session: zuul [priv]"
               │ └─247009 "sshd-session: zuul@notty"
               └─user@1000.service
                 ├─app.slice
                 │ └─dbus-broker.service
                 │   ├─12376 /usr/bin/dbus-broker-launch --scope user
                 │   └─12385 dbus-broker --log 4 --controller 9 --machine-id bf0bc0bb03de29b24cba1cc9599cf5d0 --max-bytes 100000000000000 --max-fds 25000000000000 --max-matches 5000000000
                 ├─init.scope
                 │ ├─4312 /usr/lib/systemd/systUnit boot.automount could not be found.
emd --user
                 │ └─4314 "(sd-pam)"
                 └─user.slice
                   └─podman-pause-4a554b53.scope
                     └─12312 catatonit -P

● proc-sys-fs-binfmt_misc.automount - Arbitrary Executable File Formats File System Automount Point
     Loaded: loaded (/usr/lib/systemd/system/proc-sys-fs-binfmt_misc.automount; static)
     Active: active (running) since Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
   Triggers: ● proc-sys-fs-binfmt_misc.mount
      Where: /proc/sys/fs/binfmt_misc
       Docs: https://docs.kernel.org/admin-guide/binfmt-misc.html
             https://www.freedesktop.org/wiki/Software/systemd/APIFileSystems

Jan 30 20:21:01 compute-1 systemd[1]: proc-sys-fs-binfmt_misc.automount: Got automount request for /proc/sys/fs/binfmt_misc, triggered by 268000 (sos)

● dev-cdrom.device - QEMU_DVD-ROM config-2
    Follows: unit currently follows state of sys-devices-pci0000:00-0000:00:01.1-ata1-host0-target0:0:0-0:0:0:0-block-sr0.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
     Device: /sys/devices/pci0000:00/0000:00:01.1/ata1/host0/target0:0:0/0:0:0:0/block/sr0

● dev-disk-by\x2ddiskseq-1.device - /dev/disk/by-diskseq/1
    Follows: unit currently follows state of sys-devices-pci0000:00-0000:00:04.0-virtio2-block-vda.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
     Device: /sys/devices/pci0000:00/0000:00:04.0/virtio2/block/vda

● dev-disk-by\x2ddiskseq-3.device - QEMU_DVD-ROM config-2
    Follows: unit currently follows state of sys-devices-pci0000:00-0000:00:01.1-ata1-host0-target0:0:0-0:0:0:0-block-sr0.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
     Device: /sys/devices/pci0000:00/0000:00:01.1/ata1/host0/target0:0:0/0:0:0:0/block/sr0

● dev-disk-by\x2did-ata\x2dQEMU_DVD\x2dROM_QM00001.device - QEMU_DVD-ROM config-2
    Follows: unit currently follows state of sys-devices-pci0000:00-0000:00:01.1-ata1-host0-target0:0:0-0:0:0:0-block-sr0.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
     Device: /sys/devices/pci0000:00/0000:00:01.1/ata1/host0/target0:0:0/0:0:0:0/block/sr0

● dev-disk-by\x2dlabel-config\x2d2.device - QEMU_DVD-ROM config-2
    Follows: unit currently follows state of sys-devices-pci0000:00-0000:00:01.1-ata1-host0-target0:0:0-0:0:0:0-block-sr0.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
     Device: /sys/devices/pci0000:00/0000:00:01.1/ata1/host0/target0:0:0/0:0:0:0/block/sr0

● dev-disk-by\x2dpartuuid-714daac1\x2d01.device - /dev/disk/by-partuuid/714daac1-01
    Follows: unit currently follows state of sys-devices-pci0000:00-0000:00:04.0-virtio2-block-vda-vda1.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
     Device: /sys/devices/pci0000:00/0000:00:04.0/virtio2/block/vda/vda1

● dev-disk-by\x2dpath-pci\x2d0000:00:01.1\x2data\x2d1.0.device - QEMU_DVD-ROM config-2
    Follows: unit currently follows state of sys-devices-pci0000:00-0000:00:01.1-ata1-host0-target0:0:0-0:0:0:0-block-sr0.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
     Device: /sys/devices/pci0000:00/0000:00:01.1/ata1/host0/target0:0:0/0:0:0:0/block/sr0

● dev-disk-by\x2dpath-pci\x2d0000:00:01.1\x2data\x2d1.device - QEMU_DVD-ROM config-2
    Follows: unit currently follows state of sys-devices-pci0000:00-0000:00:01.1-ata1-host0-target0:0:0-0:0:0:0-block-sr0.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
     Device: /sys/devices/pci0000:00/0000:00:01.1/ata1/host0/target0:0:0/0:0:0:0/block/sr0

● dev-disk-by\x2dpath-pci\x2d0000:00:04.0.device - /dev/disk/by-path/pci-0000:00:04.0
    Follows: unit currently follows state of sys-devices-pci0000:00-0000:00:04.0-virtio2-block-vda.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
     Device: /sys/devices/pci0000:00/0000:00:04.0/virtio2/block/vda

● dev-disk-by\x2dpath-pci\x2d0000:00:04.0\x2dpart1.device - /dev/disk/by-path/pci-0000:00:04.0-part1
    Follows: unit currently follows state of sys-devices-pci0000:00-0000:00:04.0-virtio2-block-vda-vda1.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
     Device: /sys/devices/pci0000:00/0000:00:04.0/virtio2/block/vda/vda1

● dev-disk-by\x2dpath-virtio\x2dpci\x2d0000:00:04.0.device - /dev/disk/by-path/virtio-pci-0000:00:04.0
    Follows: unit currently follows state of sys-devices-pci0000:00-0000:00:04.0-virtio2-block-vda.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
     Device: /sys/devices/pci0000:00/0000:00:04.0/virtio2/block/vda

● dev-disk-by\x2dpath-virtio\x2dpci\x2d0000:00:04.0\x2dpart1.device - /dev/disk/by-path/virtio-pci-0000:00:04.0-part1
    Follows: unit currently follows state of sys-devices-pci0000:00-0000:00:04.0-virtio2-block-vda-vda1.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
     Device: /sys/devices/pci0000:00/0000:00:04.0/virtio2/block/vda/vda1

● dev-disk-by\x2duuid-2026\x2d01\x2d30\x2d16\x2d48\x2d16\x2d00.device - QEMU_DVD-ROM config-2
    Follows: unit currently follows state of sys-devices-pci0000:00-0000:00:01.1-ata1-host0-target0:0:0-0:0:0:0-block-sr0.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
     Device: /sys/devices/pci0000:00/0000:00:01.1/ata1/host0/target0:0:0/0:0:0:0/block/sr0

● dev-disk-by\x2duuid-822f14ea\x2d6e7e\x2d41df\x2db0d8\x2dfbe282d9ded8.device - /dev/disk/by-uuid/822f14ea-6e7e-41df-b0d8-fbe282d9ded8
    Follows: unit currently follows state of sys-devices-pci0000:00-0000:00:04.0-virtio2-block-vda-vda1.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
     Device: /sys/devices/pci0000:00/0000:00:04.0/virtio2/block/vda/vda1

Jan 30 16:48:32 localhost systemd[1]: Found device /dev/disk/by-uuid/822f14ea-6e7e-41df-b0d8-fbe282d9ded8.

● dev-rfkill.device - /dev/rfkill
    Follows: unit currently follows state of sys-devices-virtual-misc-rfkill.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:48:37 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:37 UTC; 3h 32min ago
     Device: /sys/devices/virtual/misc/rfkill

● dev-sr0.device - QEMU_DVD-ROM config-2
    Follows: unit currently follows state of sys-devices-pci0000:00-0000:00:01.1-ata1-host0-target0:0:0-0:0:0:0-block-sr0.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
     Device: /sys/devices/pci0000:00/0000:00:01.1/ata1/host0/target0:0:0/0:0:0:0/block/sr0

● dev-ttyS0.device - /dev/ttyS0
    Follows: unit currently follows state of sys-devices-pnp0-00:00-tty-ttyS0.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
     Device: /sys/devices/pnp0/00:00/tty/ttyS0

Jan 30 16:48:36 localhost systemd[1]: Condition check resulted in /dev/ttyS0 being skipped.

● dev-ttyS1.device - /dev/ttyS1
    Follows: unit currently follows state of sys-devices-platform-serial8250-tty-ttyS1.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
     Device: /sys/devices/platform/serial8250/tty/ttyS1

● dev-ttyS2.device - /dev/ttyS2
    Follows: unit currently follows state of sys-devices-platform-serial8250-tty-ttyS2.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
     Device: /sys/devices/platform/serial8250/tty/ttyS2

● dev-ttyS3.device - /dev/ttyS3
    Follows: unit currently follows state of sys-devices-platform-serial8250-tty-ttyS3.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
     Device: /sys/devices/platform/serial8250/tty/ttyS3

● dev-vda.device - /dev/vda
    Follows: unit currently follows state of sys-devices-pci0000:00-0000:00:04.0-virtio2-block-vda.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
     Device: /sys/devices/pci0000:00/0000:00:04.0/virtio2/block/vda

● dev-vda1.device - /dev/vda1
    Follows: unit currently follows state of sys-devices-pci0000:00-0000:00:04.0-virtio2-block-vda-vda1.device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
     Device: /sys/devices/pci0000:00/0000:00:04.0/virtio2/block/vda/vda1

● sys-devices-pci0000:00-0000:00:01.1-ata1-host0-target0:0:0-0:0:0:0-block-sr0.device - QEMU_DVD-ROM config-2
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
     Device: /sys/devices/pci0000:00/0000:00:01.1/ata1/host0/target0:0:0/0:0:0:0/block/sr0

● sys-devices-pci0000:00-0000:00:03.0-virtio1-net-eth0.device - Virtio network device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
     Device: /sys/devices/pci0000:00/0000:00:03.0/virtio1/net/eth0

● sys-devices-pci0000:00-0000:00:04.0-virtio2-block-vda-vda1.device - /sys/devices/pci0000:00/0000:00:04.0/virtio2/block/vda/vda1
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
     Device: /sys/devices/pci0000:00/0000:00:04.0/virtio2/block/vda/vda1

● sys-devices-pci0000:00-0000:00:04.0-virtio2-block-vda.device - /sys/devices/pci0000:00/0000:00:04.0/virtio2/block/vda
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
     Device: /sys/devices/pci0000:00/0000:00:04.0/virtio2/block/vda

● sys-devices-pci0000:00-0000:00:07.0-virtio5-net-eth1.device - Virtio network device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:52:15 UTC; 3h 29min ago
      Until: Fri 2026-01-30 16:52:15 UTC; 3h 29min ago
     Device: /sys/devices/pci0000:00/0000:00:07.0/virtio5/net/eth1

● sys-devices-platform-serial8250-tty-ttyS1.device - /sys/devices/platform/serial8250/tty/ttyS1
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
     Device: /sys/devices/platform/serial8250/tty/ttyS1

● sys-devices-platform-serial8250-tty-ttyS2.device - /sys/devices/platform/serial8250/tty/ttyS2
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
     Device: /sys/devices/platform/serial8250/tty/ttyS2

● sys-devices-platform-serial8250-tty-ttyS3.device - /sys/devices/platform/serial8250/tty/ttyS3
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
     Device: /sys/devices/platform/serial8250/tty/ttyS3

● sys-devices-pnp0-00:00-tty-ttyS0.device - /sys/devices/pnp0/00:00/tty/ttyS0
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
     Device: /sys/devices/pnp0/00:00/tty/ttyS0

● sys-devices-virtual-misc-rfkill.device - /sys/devices/virtual/misc/rfkill
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:48:37 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:37 UTC; 3h 32min ago
     Device: /sys/devices/virtual/misc/rfkill

● sys-devices-virtual-net-br\x2dex.device - /sys/devices/virtual/net/br-ex
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
      Until: Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
     Device: /sys/devices/virtual/net/br-ex

● sys-devices-virtual-net-br\x2dint.device - /sys/devices/virtual/net/br-int
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:34:06 UTC; 2h 47min ago
      Until: Fri 2026-01-30 17:34:06 UTC; 2h 47min ago
     Device: /sys/devices/virtual/net/br-int

● sys-devices-virtual-net-genev_sys_6081.device - /sys/devices/virtual/net/genev_sys_6081
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:34:06 UTC; 2h 47min ago
      Until: Fri 2026-01-30 17:34:06 UTC; 2h 47min ago
     Device: /sys/devices/virtual/net/genev_sys_6081

● sys-devices-virtual-net-ovs\x2dsystem.device - /sys/devices/virtual/net/ovs-system
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
      Until: Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
     Device: /sys/devices/virtual/net/ovs-system

● sys-devices-virtual-net-tap20be9363\x2d80.device - /sys/devices/virtual/net/tap20be9363-80
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:59:45 UTC; 2h 21min ago
      Until: Fri 2026-01-30 17:59:45 UTC; 2h 21min ago
     Device: /sys/devices/virtual/net/tap20be9363-80

● sys-devices-virtual-net-tap311f9b45\x2d40.device - /sys/devices/virtual/net/tap311f9b45-40
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:59:47 UTC; 2h 21min ago
      Until: Fri 2026-01-30 17:59:47 UTC; 2h 21min ago
     Device: /sys/devices/virtual/net/tap311f9b45-40

● sys-devices-virtual-net-tap98614c39\x2d56.device - /sys/devices/virtual/net/tap98614c39-56
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:59:45 UTC; 2h 21min ago
      Until: Fri 2026-01-30 17:59:45 UTC; 2h 21min ago
     Device: /sys/devices/virtual/net/tap98614c39-56

● sys-devices-virtual-net-tap9d65d93a\x2de7.device - /sys/devices/virtual/net/tap9d65d93a-e7
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 18:01:13 UTC; 2h 20min ago
      Until: Fri 2026-01-30 18:01:13 UTC; 2h 20min ago
     Device: /sys/devices/virtual/net/tap9d65d93a-e7

● sys-devices-virtual-net-tapbdcc71de\x2d2c.device - /sys/devices/virtual/net/tapbdcc71de-2c
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 18:01:13 UTC; 2h 20min ago
      Until: Fri 2026-01-30 18:01:13 UTC; 2h 20min ago
     Device: /sys/devices/virtual/net/tapbdcc71de-2c

● sys-devices-virtual-net-tapf2850036\x2df5.device - /sys/devices/virtual/net/tapf2850036-f5
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:59:45 UTC; 2h 21min ago
      Until: Fri 2026-01-30 17:59:45 UTC; 2h 21min ago
     Device: /sys/devices/virtual/net/tapf2850036-f5

● sys-devices-virtual-net-vlan20.device - /sys/devices/virtual/net/vlan20
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
      Until: Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
     Device: /sys/devices/virtual/net/vlan20

● sys-devices-virtual-net-vlan21.device - /sys/devices/virtual/net/vlan21
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
      Until: Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
     Device: /sys/devices/virtual/net/vlan21

● sys-devices-virtual-net-vlan22.device - /sys/devices/virtual/net/vlan22
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
      Until: Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
     Device: /sys/devices/virtual/net/vlan22

● sys-module-configfs.device - /sys/module/configfs
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
     Device: /sys/module/configfs

● sys-module-fuse.device - /sys/module/fuse
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
     Device: /sys/module/fuse

● sys-subsystem-net-devices-br\x2dex.device - /sys/subsystem/net/devices/br-ex
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
      Until: Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
     Device: /sys/devices/virtual/net/br-ex

● sys-subsystem-net-devices-br\x2dint.device - /sys/subsystem/net/devices/br-int
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:34:06 UTC; 2h 47min ago
      Until: Fri 2026-01-30 17:34:06 UTC; 2h 47min ago
     Device: /sys/devices/virtual/net/br-int

● sys-subsystem-net-devices-eth0.device - Virtio network device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
     Device: /sys/devices/pci0000:00/0000:00:03.0/virtio1/net/eth0

● sys-subsystem-net-devices-eth1.device - Virtio network device
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 16:52:15 UTC; 3h 29min ago
      Until: Fri 2026-01-30 16:52:15 UTC; 3h 29min ago
     Device: /sys/devices/pci0000:00/0000:00:07.0/virtio5/net/eth1

● sys-subsystem-net-devices-genev_sys_6081.device - /sys/subsystem/net/devices/genev_sys_6081
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:34:06 UTC; 2h 47min ago
      Until: Fri 2026-01-30 17:34:06 UTC; 2h 47min ago
     Device: /sys/devices/virtual/net/genev_sys_6081

● sys-subsystem-net-devices-ovs\x2dsystem.device - /sys/subsystem/net/devices/ovs-system
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
      Until: Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
     Device: /sys/devices/virtual/net/ovs-system

● sys-subsystem-net-devices-tap20be9363\x2d80.device - /sys/subsystem/net/devices/tap20be9363-80
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:59:45 UTC; 2h 21min ago
      Until: Fri 2026-01-30 17:59:45 UTC; 2h 21min ago
     Device: /sys/devices/virtual/net/tap20be9363-80

● sys-subsystem-net-devices-tap311f9b45\x2d40.device - /sys/subsystem/net/devices/tap311f9b45-40
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:59:47 UTC; 2h 21min ago
      Until: Fri 2026-01-30 17:59:47 UTC; 2h 21min ago
     Device: /sys/devices/virtual/net/tap311f9b45-40

● sys-subsystem-net-devices-tap98614c39\x2d56.device - /sys/subsystem/net/devices/tap98614c39-56
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:59:45 UTC; 2h 21min ago
      Until: Fri 2026-01-30 17:59:45 UTC; 2h 21min ago
     Device: /sys/devices/virtual/net/tap98614c39-56

● sys-subsystem-net-devices-tap9d65d93a\x2de7.device - /sys/subsystem/net/devices/tap9d65d93a-e7
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 18:01:13 UTC; 2h 20min ago
      Until: Fri 2026-01-30 18:01:13 UTC; 2h 20min ago
     Device: /sys/devices/virtual/net/tap9d65d93a-e7

Unit boot.mount could not be found.
Unit home.mount could not be found.
● sys-subsystem-net-devices-tapbdcc71de\x2d2c.device - /sys/subsystem/net/devices/tapbdcc71de-2c
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 18:01:13 UTC; 2h 20min ago
      Until: Fri 2026-01-30 18:01:13 UTC; 2h 20min ago
     Device: /sys/devices/virtual/net/tapbdcc71de-2c

● sys-subsystem-net-devices-tapf2850036\x2df5.device - /sys/subsystem/net/devices/tapf2850036-f5
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:59:45 UTC; 2h 21min ago
      Until: Fri 2026-01-30 17:59:45 UTC; 2h 21min ago
     Device: /sys/devices/virtual/net/tapf2850036-f5

● sys-subsystem-net-devices-vlan20.device - /sys/subsystem/net/devices/vlan20
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
      Until: Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
     Device: /sys/devices/virtual/net/vlan20

● sys-subsystem-net-devices-vlan21.device - /sys/subsystem/net/devices/vlan21
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
      Until: Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
     Device: /sys/devices/virtual/net/vlan21

● sys-subsystem-net-devices-vlan22.device - /sys/subsystem/net/devices/vlan22
     Loaded: loaded
     Active: active (plugged) since Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
      Until: Fri 2026-01-30 17:29:44 UTC; 2h 51min ago
     Device: /sys/devices/virtual/net/vlan22

● -.mount - Root Mount
     Loaded: loaded (/etc/fstab; generated)
     Active: active (mounted)
      Where: /
       What: /dev/vda1
       Docs: man:fstab(5)
             man:systemd-fstab-generator(8)

● dev-hugepages.mount - Huge Pages File System
     Loaded: loaded (/usr/lib/systemd/system/dev-hugepages.mount; static)
     Active: active (mounted) since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
      Where: /dev/hugepages
       What: hugetlbfs
       Docs: https://docs.kernel.org/admin-guide/mm/hugetlbpage.html
             https://www.freedesktop.org/wiki/Software/systemd/APIFileSystems
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 4.0K (peak: 508.0K)
        CPU: 6ms
     CGroup: /dev-hugepages.mount

● dev-hugepages1G.mount - /dev/hugepages1G
     Loaded: loaded (/etc/fstab; generated)
     Active: active (mounted) since Fri 2026-01-30 17:31:46 UTC; 2h 49min ago
      Until: Fri 2026-01-30 17:31:46 UTC; 2h 49min ago
      Where: /dev/hugepages1G
       What: none
       Docs: man:fstab(5)
             man:systemd-fstab-generator(8)

● dev-hugepages2M.mount - /dev/hugepages2M
     Loaded: loaded (/etc/fstab; generated)
     Active: active (mounted) since Fri 2026-01-30 17:31:47 UTC; 2h 49min ago
      Until: Fri 2026-01-30 17:31:47 UTC; 2h 49min ago
      Where: /dev/hugepages2M
       What: none
       Docs: man:fstab(5)
             man:systemd-fstab-generator(8)

● dev-mqueue.mount - POSIX Message Queue File System
     Loaded: loaded (/usr/lib/systemd/system/dev-mqueue.mount; static)
     Active: active (mounted) since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
      Where: /dev/mqueue
       What: mqueue
       Docs: man:mq_overview(7)
             https://www.freedesktop.org/wiki/Software/systemd/APIFileSystems
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 4.0K (peak: 540.0K)
        CPU: 6ms
     CGroup: /dev-mqueue.mount

Jan 30 16:48:35 localhost systemd[1]: Mounted POSIX Message Queue File System.

○ proc-fs-nfsd.mount - NFSD configuration filesystem
     Loaded: loaded (/usr/lib/systemd/system/proc-fs-nfsd.mount; static)
     Active: inactive (dead)
      Where: /proc/fs/nfsd
       What: nfsd

● proc-sys-fs-binfmt_misc.mount - Arbitrary Executable File Formats File System
     Loaded: loaded (/usr/lib/systemd/system/proc-sys-fs-binfmt_misc.mount; disabled; preset: disabled)
     Active: active (mounted) since Fri 2026-01-30 20:21:01 UTC; 22s ago
      Until: Fri 2026-01-30 20:21:01 UTC; 22s ago
TriggeredBy: ● proc-sys-fs-binfmt_misc.automount
      Where: /proc/sys/fs/binfmt_misc
       What: binfmt_misc
       Docs: https://docs.kernel.org/admin-guide/binfmt-misc.html
             https://www.freedesktop.org/wiki/Software/systemd/APIFileSystems
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 8.0K (peak: 540.0K)
        CPU: 6ms
     CGroup: /proc-sys-fs-binfmt_misc.mount

Jan 30 20:21:01 compute-1 systemd[1]: Mounting Arbitrary Executable File Formats File System...
Jan 30 20:21:01 compute-1 systemd[1]: Mounted Arbitrary Executable File Formats File System.

● run-credentials-systemd\x2dsysctl.service.mount - /run/credentials/systemd-sysctl.service
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 17:26:33 UTC; 2h 54min ago
      Until: Fri 2026-01-30 17:26:33 UTC; 2h 54min ago
      Where: /run/credentials/systemd-sysctl.service
       What: none

● run-credentials-systemd\x2dsysusers.service.mount - /run/credentials/systemd-sysusers.service
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
      Where: /run/credentials/systemd-sysusers.service
       What: none

● run-credentials-systemd\x2dtmpfiles\x2dsetup.service.mount - /run/credentials/systemd-tmpfiles-setup.service
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Where: /run/credentials/systemd-tmpfiles-setup.service
       What: none

● run-credentials-systemd\x2dtmpfiles\x2dsetup\x2ddev.service.mount - /run/credentials/systemd-tmpfiles-setup-dev.service
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
      Where: /run/credentials/systemd-tmpfiles-setup-dev.service
       What: none

● run-netns-ovnmeta\x2d20be9363\x2d8d31\x2d4010\x2d8379\x2d2f4db75ec5ee.mount - /run/netns/ovnmeta-20be9363-8d31-4010-8379-2f4db75ec5ee
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 17:59:45 UTC; 2h 21min ago
      Until: Fri 2026-01-30 17:59:45 UTC; 2h 21min ago
      Where: /run/netns/ovnmeta-20be9363-8d31-4010-8379-2f4db75ec5ee
       What: nsfs

● run-netns-ovnmeta\x2d311f9b45\x2d49be\x2d4345\x2d9ac8\x2da1fe5a0b8a53.mount - /run/netns/ovnmeta-311f9b45-49be-4345-9ac8-a1fe5a0b8a53
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 17:59:47 UTC; 2h 21min ago
      Until: Fri 2026-01-30 17:59:47 UTC; 2h 21min ago
      Where: /run/netns/ovnmeta-311f9b45-49be-4345-9ac8-a1fe5a0b8a53
       What: nsfs

● run-netns.mount - /run/netns
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 17:31:16 UTC; 2h 50min ago
      Until: Fri 2026-01-30 17:31:16 UTC; 2h 50min ago
      Where: /run/netns
       What: tmpfs

● run-user-1000.mount - /run/user/1000
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 16:50:03 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:50:03 UTC; 3h 31min ago
      Where: /run/user/1000
       What: tmpfs

● sys-fs-fuse-connections.mount - FUSE Control File System
     Loaded: loaded (/usr/lib/systemd/system/sys-fs-fuse-connections.mount; static)
     Active: active (mounted) since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
      Where: /sys/fs/fuse/connections
       What: fusectl
       Docs: https://docs.kernel.org/filesystems/fuse.html
             https://www.freedesktop.org/wiki/Software/systemd/APIFileSystems
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 4.0K (peak: 540.0K)
        CPU: 3ms
     CGroup: /sys-fs-fuse-connections.mount

Jan 30 16:48:35 localhost systemd[1]: Mounting FUSE Control File System...
Jan 30 16:48:35 localhost systemd[1]: Mounted FUSE Control File System.
Unit sysroot.mount could not be found.

● sys-kernel-config.mount - Kernel Configuration File System
     Loaded: loaded (/proc/self/mountinfo; static)
     Active: active (mounted) since Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
      Where: /sys/kernel/config
       What: configfs
       Docs: https://docs.kernel.org/filesystems/configfs.html
             https://www.freedesktop.org/wiki/Software/systemd/APIFileSystems

● sys-kernel-debug-tracing.mount - /sys/kernel/debug/tracing
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 20:21:00 UTC; 23s ago
      Until: Fri 2026-01-30 20:21:00 UTC; 23s ago
      Where: /sys/kernel/debug/tracing
       What: tracefs

● sys-kernel-debug.mount - Kernel Debug File System
     Loaded: loaded (/usr/lib/systemd/system/sys-kernel-debug.mount; static)
     Active: active (mounted) since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
      Where: /sys/kernel/debug
       What: debugfs
       Docs: https://docs.kernel.org/filesystems/debugfs.html
             https://www.freedesktop.org/wiki/Software/systemd/APIFileSystems
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 28.0K (peak: 524.0K)
        CPU: 5ms
     CGroup: /sys-kernel-debug.mount

Jan 30 16:48:35 localhost systemd[1]: Mounted Kernel Debug File System.

● sys-kernel-tracing.mount - Kernel Trace File System
     Loaded: loaded (/usr/lib/systemd/system/sys-kernel-tracing.mount; static)
     Active: active (mounted) since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
      Where: /sys/kernel/tracing
       What: tracefs
       Docs: https://docs.kernel.org/trace/ftrace.html
             https://www.freedesktop.org/wiki/Software/systemd/APIFileSystems
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 28.0K (peak: 548.0K)
        CPU: 5ms
     CGroup: /sys-kernel-tracing.mount

Jan 30 16:48:35 localhost systemd[1]: Mounted Kernel Trace File System.

○ tmp.mount - Temporary Directory /tmp
     Loaded: loaded (/usr/lib/systemd/system/tmp.mount; disabled; preset: disabled)
     Active: inactive (dead)
      Where: /tmp
       What: tmpfs
       Docs: https://systemd.io/TEMPORARY_DIRECTORIES
             man:file-hierarchy(7)
             https://www.freedesktop.org/wiki/Software/systemd/APIFileSystems

● var-lib-containers-storage-overlay-03ed671e96d0bccef4f983e461212db2463991d511b6d92f58d17e652a0f55bf-merged.mount - /var/lib/containers/storage/overlay/03ed671e96d0bccef4f983e461212db2463991d511b6d92f58d17e652a0f55bf/merged
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 19:32:55 UTC; 48min ago
      Until: Fri 2026-01-30 19:32:55 UTC; 48min ago
      Where: /var/lib/containers/storage/overlay/03ed671e96d0bccef4f983e461212db2463991d511b6d92f58d17e652a0f55bf/merged
       What: overlay

● var-lib-containers-storage-overlay-56cbc7a5845c2f948fa0a06902396cf60f688145d159ab4cefc50c0311f807a7-merged.mount - /var/lib/containers/storage/overlay/56cbc7a5845c2f948fa0a06902396cf60f688145d159ab4cefc50c0311f807a7/merged
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 17:43:22 UTC; 2h 38min ago
      Until: Fri 2026-01-30 17:43:22 UTC; 2h 38min ago
      Where: /var/lib/containers/storage/overlay/56cbc7a5845c2f948fa0a06902396cf60f688145d159ab4cefc50c0311f807a7/merged
       What: overlay

● var-lib-containers-storage-overlay-72578f25058dc837ad83ba8c3251884e154e0b02f2395e221f12c1cdfb70faf8-merged.mount - /var/lib/containers/storage/overlay/72578f25058dc837ad83ba8c3251884e154e0b02f2395e221f12c1cdfb70faf8/merged
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 17:43:36 UTC; 2h 37min ago
      Until: Fri 2026-01-30 17:43:36 UTC; 2h 37min ago
      Where: /var/lib/containers/storage/overlay/72578f25058dc837ad83ba8c3251884e154e0b02f2395e221f12c1cdfb70faf8/merged
       What: overlay

● var-lib-containers-storage-overlay-a81ee3b6db02f1e8b888f7d6d544e298f294dce221a720bfbcb0453cbca17897-merged.mount - /var/lib/containers/storage/overlay/a81ee3b6db02f1e8b888f7d6d544e298f294dce221a720bfbcb0453cbca17897/merged
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 17:59:47 UTC; 2h 21min ago
      Until: Fri 2026-01-30 17:59:47 UTC; 2h 21min ago
      Where: /var/lib/containers/storage/overlay/a81ee3b6db02f1e8b888f7d6d544e298f294dce221a720bfbcb0453cbca17897/merged
       What: overlay

● var-lib-containers-storage-overlay-dd2a7abd179624b7171e70bc40b45c87e052438704c0e0add0c6dbb505d3062a-merged.mount - /var/lib/containers/storage/overlay/dd2a7abd179624b7171e70bc40b45c87e052438704c0e0add0c6dbb505d3062a/merged
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 17:59:48 UTC; 2h 21min ago
      Until: Fri 2026-01-30 17:59:48 UTC; 2h 21min ago
      Where: /var/lib/containers/storage/overlay/dd2a7abd179624b7171e70bc40b45c87e052438704c0e0add0c6dbb505d3062a/merged
       What: overlay

● var-lib-containers-storage-overlay-e330022d1d5fec03687624ee194c35ec2d6efeb303f1bcafaa6436c202bba79c-merged.mount - /var/lib/containers/storage/overlay/e330022d1d5fec03687624ee194c35ec2d6efeb303f1bcafaa6436c202bba79c/merged
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 17:35:02 UTC; 2h 46min ago
      Until: Fri 2026-01-30 17:35:02 UTC; 2h 46min ago
      Where: /var/lib/containers/storage/overlay/e330022d1d5fec03687624ee194c35ec2d6efeb303f1bcafaa6436c202bba79c/merged
       What: overlay

● var-lib-containers-storage-overlay-ec0cb74c410a0de652c49b5a28304df0ffe117de48bd162b7fb66ae6c3935158-merged.mount - /var/lib/containers/storage/overlay/ec0cb74c410a0de652c49b5a28304df0ffe117de48bd162b7fb66ae6c3935158/merged
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 17:42:21 UTC; 2h 39min ago
      Until: Fri 2026-01-30 17:42:21 UTC; 2h 39min ago
      Where: /var/lib/containers/storage/overlay/ec0cb74c410a0de652c49b5a28304df0ffe117de48bd162b7fb66ae6c3935158/merged
       What: overlay

● var-lib-containers-storage-overlay-f4bd1562db2fea3ea6205cd49304bc40bbfc0ff1082df396c7d418007837ef6f-merged.mount - /var/lib/containers/storage/overlay/f4bd1562db2fea3ea6205cd49304bc40bbfc0ff1082df396c7d418007837ef6f/merged
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 17:44:14 UTC; 2h 37min ago
      Until: Fri 2026-01-30 17:44:14 UTC; 2h 37min ago
      Where: /var/lib/containers/storage/overlay/f4bd1562db2fea3ea6205cd49304bc40bbfc0ff1082df396c7d418007837ef6f/merged
       What: overlay

● var-lib-containers-storage-overlay-f4f785da56b08df91874e224f2bb154f5c5d6320dbc77d4d7272701afbb51000-merged.mount - /var/lib/containers/storage/overlay/f4f785da56b08df91874e224f2bb154f5c5d6320dbc77d4d7272701afbb51000/merged
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 17:43:54 UTC; 2h 37min ago
      Until: Fri 2026-01-30 17:43:54 UTC; 2h 37min ago
      Where: /var/lib/containers/storage/overlay/f4f785da56b08df91874e224f2bb154f5c5d6320dbc77d4d7272701afbb51000/merged
       What: overlay

● var-lib-containers-storage-overlay.mount - /var/lib/containers/storage/overlay
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 17:34:05 UTC; 2h 47min ago
      Until: Fri 2026-01-30 17:34:05 UTC; 2h 47min ago
      Where: /var/lib/containers/storage/overlay
       What: /dev/vda1

● var-lib-containers-storage-overlay\x2dcontainers-407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a-userdata-shm.mount - /var/lib/containers/storage/overlay-containers/407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a/userdata/shm
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 17:43:36 UTC; 2h 37min ago
      Until: Fri 2026-01-30 17:43:36 UTC; 2h 37min ago
      Where: /var/lib/containers/storage/overlay-containers/407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a/userdata/shm
       What: shm

● var-lib-containers-storage-overlay\x2dcontainers-455dca0f327e5b1da1cd1cb8c0cc19d961216e4814bf06622139b34f7df7859b-userdata-shm.mount - /var/lib/containers/storage/overlay-containers/455dca0f327e5b1da1cd1cb8c0cc19d961216e4814bf06622139b34f7df7859b/userdata/shm
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 17:42:21 UTC; 2h 39min ago
      Until: Fri 2026-01-30 17:42:21 UTC; 2h 39min ago
      Where: /var/lib/containers/storage/overlay-containers/455dca0f327e5b1da1cd1cb8c0cc19d961216e4814bf06622139b34f7df7859b/userdata/shm
       What: shm

● var-lib-containers-storage-overlay\x2dcontainers-4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd-userdata-shm.mount - /var/lib/containers/storage/overlay-containers/4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd/userdata/shm
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 17:43:22 UTC; 2h 38min ago
      Until: Fri 2026-01-30 17:43:22 UTC; 2h 38min ago
      Where: /var/lib/containers/storage/overlay-containers/4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd/userdata/shm
       What: shm

● var-lib-containers-storage-overlay\x2dcontainers-834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20-userdata-shm.mount - /var/lib/containers/storage/overlay-containers/834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20/userdata/shm
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 17:35:02 UTC; 2h 46min ago
      Until: Fri 2026-01-30 17:35:02 UTC; 2h 46min ago
      Where: /var/lib/containers/storage/overlay-containers/834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20/userdata/shm
       What: shm

● var-lib-containers-storage-overlay\x2dcontainers-883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037-userdata-shm.mount - /var/lib/containers/storage/overlay-containers/883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037/userdata/shm
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 19:32:55 UTC; 48min ago
      Until: Fri 2026-01-30 19:32:55 UTC; 48min ago
      Where: /var/lib/containers/storage/overlay-containers/883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037/userdata/shm
       What: shm

● var-lib-containers-storage-overlay\x2dcontainers-bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98-userdata-shm.mount - /var/lib/containers/storage/overlay-containers/bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98/userdata/shm
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 17:43:54 UTC; 2h 37min ago
      Until: Fri 2026-01-30 17:43:54 UTC; 2h 37min ago
      Where: /var/lib/containers/storage/overlay-containers/bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98/userdata/shm
       What: shm

● var-lib-containers-storage-overlay\x2dcontainers-cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18-userdata-shm.mount - /var/lib/containers/storage/overlay-containers/cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18/userdata/shm
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 17:59:47 UTC; 2h 21min ago
      Until: Fri 2026-01-30 17:59:47 UTC; 2h 21min ago
      Where: /var/lib/containers/storage/overlay-containers/cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18/userdata/shm
       What: shm

● var-lib-containers-storage-overlay\x2dcontainers-ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab-userdata-shm.mount - /var/lib/containers/storage/overlay-containers/ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab/userdata/shm
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 17:59:48 UTC; 2h 21min ago
      Until: Fri 2026-01-30 17:59:48 UTC; 2h 21min ago
      Where: /var/lib/containers/storage/overlay-containers/ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab/userdata/shm
       What: shm

● var-lib-containers-storage-overlay\x2dcontainers-e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a-userdata-shm.mount - /var/lib/containers/storage/overlay-containers/e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a/userdata/shm
     Loaded: loaded (/proc/self/mountinfo)
     Active: active (mounted) since Fri 2026-01-30 17:44:14 UTC; 2h 37min ago
      Until: Fri 2026-01-30 17:44:14 UTC; 2h 37min ago
      Where: /var/lib/containers/storage/overlay-containers/e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a/userdata/shm
       What: shm

○ var-lib-machines.mount - Virtual Machine and Container Storage (Compatibility)
     Loaded: loaded (/usr/lib/systemd/system/var-lib-machines.mount; static)
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 17:39:34 UTC; 2h 41min ago
      Where: /var/lib/machines
       What: /var/lib/machines.raw

Jan 30 17:39:34 compute-1 systemd[1]: Virtual Machine and Container Storage (Compatibility) was skipped because of an unmet condition check (ConditionPathExists=/var/lib/machines.raw).

● var-lib-nfs-rpc_pipefs.mount - RPC Pipe File System
     Loaded: loaded (/proc/self/mountinfo; static)
     Active: active (mounted) since Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
      Where: /var/lib/nfs/rpc_pipefs
       What: rpc_pipefs

● systemd-ask-password-console.path - Dispatch Password Requests to Console Directory Watch
     Loaded: loaded (/usr/lib/systemd/system/systemd-ask-password-console.path; static)
     Active: active (waiting) since Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
   Triggers: ● systemd-ask-password-console.service
       Docs: man:systemd-ask-password-console.path(8)

● systemd-ask-password-wall.path - Forward Password Requests to Wall Directory Watch
     Loaded: loaded (/usr/lib/systemd/system/systemd-ask-password-wall.path; static)
     Active: active (waiting) since Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
   Triggers: ● systemd-ask-password-wall.service
       Docs: man:systemd-ask-password-wall.path(8)

● init.scope - System and Service Manager
     Loaded: loaded
  Transient: yes
     Active: active (running) since Fri 2026-01-30 16:48:31 UTC; 3h 32min ago
       Docs: man:systemd(1)
         IO: 476.0K read, 0B written
      Tasks: 1 (limit: 100092)
     Memory: 42.3M (peak: 62.3M)
        CPU: 53.655s
     CGroup: /init.scope
             └─1 /usr/lib/systemd/systemd --switched-root --system --deserialize 31

Jan 30 20:21:01 compute-1 systemd[1]: proc-sys-fs-binfmt_misc.automount: Got automount request for /proc/sys/fs/binfmt_misc, triggered by 268000 (sos)
Jan 30 20:21:01 compute-1 systemd[1]: Mounting Arbitrary Executable File Formats File System...
Jan 30 20:21:01 compute-1 systemd[1]: Mounted Arbitrary Executable File Formats File System.
Jan 30 20:21:05 compute-1 systemd[1]: Starting Hostname Service...
Jan 30 20:21:05 compute-1 systemd[1]: Started Hostname Service.
Jan 30 20:21:06 compute-1 systemd[1]: Started Session 163 of User zuul.
Jan 30 20:21:06 compute-1 systemd[1]: Started Session 164 of User zuul.
Jan 30 20:21:06 compute-1 systemd[1]: session-163.scope: Deactivated successfully.
Jan 30 20:21:06 compute-1 systemd[1]: session-164.scope: Deactivated successfully.
Jan 30 20:21:14 compute-1 systemd[1]: var-lib-containers-storage-overlay-volatile\x2dcheck1692682708-merged.mount: Deactivated successfully.

● libpod-407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a.scope - libcrun container
     Loaded: loaded (/run/systemd/transient/libpod-407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 17:43:36 UTC; 2h 37min ago
         IO: 0B read, 0B written
      Tasks: 6 (limit: 4096)
     Memory: 11.2M (peak: 13.0M)
        CPU: 2.568s
     CGroup: /machine.slice/libpod-407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a.scope
             └─container
               └─196153 /bin/node_exporter --web.config.file=/etc/node_exporter/node_exporter.yaml --web.disable-exporter-metrics --collector.systemd "--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service" --no-collector.dmi --no-collector.entropy --no-collector.thermal_zone --no-collector.time --no-collector.timex --no-collector.uname --no-collector.stat --no-collector.hwmon --no-collector.os --no-collector.selinux --no-collector.textfile --no-collector.powersupplyclass --no-collector.pressure --no-collector.rapl

Jan 30 17:43:36 compute-1 systemd[1]: Started libcrun container.

● libpod-455dca0f327e5b1da1cd1cb8c0cc19d961216e4814bf06622139b34f7df7859b.scope - libcrun container
     Loaded: loaded (/run/systemd/transient/libpod-455dca0f327e5b1da1cd1cb8c0cc19d961216e4814bf06622139b34f7df7859b.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 17:42:21 UTC; 2h 39min ago
         IO: 192.0K read, 2.2G written
      Tasks: 31 (limit: 4096)
     Memory: 248.4M (peak: 1003.1M)
        CPU: 3min 34.404s
     CGroup: /machine.slice/libpod-455dca0f327e5b1da1cd1cb8c0cc19d961216e4814bf06622139b34f7df7859b.scope
             └─container
               ├─183473 dumb-init --single-child -- kolla_start
               ├─183475 /usr/bin/python3 /usr/bin/nova-compute
               ├─213354 /usr/bin/python3 /bin/privsep-helper --config-file /etc/nova/nova.conf --config-file /etc/nova/nova-compute.conf --config-dir /etc/nova/nova.conf.d --privsep_context nova.privsep.sys_admin_pctxt --privsep_sock_path /tmp/tmp33az_fjs/privsep.sock
               └─213418 /usr/bin/python3 /bin/privsep-helper --config-file /etc/nova/nova.conf --config-file /etc/nova/nova-compute.conf --config-dir /etc/nova/nova.conf.d --privsep_context vif_plug_ovs.privsep.vif_plug --privsep_sock_path /tmp/tmptmqu8bjz/privsep.sock

Jan 30 17:42:21 compute-1 systemd[1]: Started libcrun container.

● libpod-4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd.scope - libcrun container
     Loaded: loaded (/run/systemd/transient/libpod-4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 17:43:22 UTC; 2h 38min ago
         IO: 0B read, 232.0K written
      Tasks: 7 (limit: 4096)
     Memory: 105.1M (peak: 106.9M)
        CPU: 12.934s
     CGroup: /machine.slice/libpod-4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd.scope
             └─container
               ├─193155 dumb-init --single-child -- kolla_start
               ├─193158 "ceilometer-polling: master process [/usr/bin/ceilometer-polling --polling-namespaces compute --logfile /dev/stdout]"
               └─193336 "ceilometer-polling: AgentManager worker(0)"

Jan 30 17:43:22 compute-1 systemd[1]: Started libcrun container.
Jan 30 17:43:22 compute-1 sudo[193159]: ceilometer : PWD=/ ; USER=root ; COMMAND=/usr/local/bin/kolla_set_configs
Jan 30 17:43:22 compute-1 sudo[193159]: pam_systemd(sudo:session): Failed to connect to system bus: No such file or directory
Jan 30 17:43:22 compute-1 sudo[193159]: pam_unix(sudo:session): session opened for user root(uid=0) by (uid=42405)
Jan 30 17:43:22 compute-1 sudo[193159]: pam_unix(sudo:session): session closed for user root
Jan 30 17:43:22 compute-1 sudo[193182]: ceilometer : PWD=/ ; USER=root ; COMMAND=/usr/local/bin/kolla_copy_cacerts
Jan 30 17:43:22 compute-1 sudo[193182]: pam_systemd(sudo:session): Failed to connect to system bus: No such file or directory
Jan 30 17:43:22 compute-1 sudo[193182]: pam_unix(sudo:session): session opened for user root(uid=0) by (uid=42405)
Jan 30 17:43:22 compute-1 sudo[193182]: pam_unix(sudo:session): session closed for user root

● libpod-834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20.scope - libcrun container
     Loaded: loaded (/run/systemd/transient/libpod-834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20.scope; transient)
  Transient: yes
    Drop-In: /run/systemd/transient/libpod-834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20.scope.d
             └─dep.conf
     Active: active (running) since Fri 2026-01-30 17:35:03 UTC; 2h 46min ago
         IO: 0B read, 29.0M written
      Tasks: 11 (limit: 4096)
     Memory: 427.5M (peak: 469.8M)
        CPU: 1min 12.613s
     CGroup: /machine.slice/libpod-834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20.scope
             └─container
               ├─105006 dumb-init --single-child -- kolla_start
               ├─105009 "neutron-ovn-metadata-agent (/usr/bin/python3 /usr/bin/neutron-ovn-metadata-agent)"
               ├─105396 "neutron-ovn-metadata-agent (/usr/bin/python3 /usr/bin/neutron-ovn-metadata-agent)"
               ├─105526 /usr/bin/python3 /bin/privsep-helper --config-file /etc/neutron/neutron.conf --config-dir /etc/neutron.conf.d --privsep_context neutron.privileged.namespace_cmd --privsep_sock_path /tmp/tmph8sg2l6q/privsep.sock
               ├─212908 /usr/bin/python3 /bin/privsep-helper --config-file /etc/neutron/neutron.conf --config-dir /etc/neutron.conf.d --privsep_context neutron.privileged.default --privsep_sock_path /tmp/tmpu4b226u9/privsep.sock
               └─213561 /usr/bin/python3 /bin/privsep-helper --config-file /etc/neutron/neutron.conf --config-dir /etc/neutron.conf.d --privsep_context neutron.privileged.link_cmd --privsep_sock_path /tmp/tmpg6ekdec0/privsep.sock

Jan 30 20:18:27 compute-1 podman[266909]: 2026-01-30 20:18:27.150727056 +0000 UTC m=+0.064998252 container died 4b436a221e2cbd07662d7ed336236a5b82411ca4b6cf1eb2a9cd2472b6deacbb (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-33a85eb7-4238-4ed0-8000-6c9d0bb12f10, org.label-schema.vendor=CentOS, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=b85d0548925081ae8c6bdd697658cec4, io.buildah.version=1.41.3, org.label-schema.build-date=20260127, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0)
Jan 30 20:18:27 compute-1 podman[266909]: 2026-01-30 20:18:27.1865287 +0000 UTC m=+0.100799896 container cleanup 4b436a221e2cbd07662d7ed336236a5b82411ca4b6cf1eb2a9cd2472b6deacbb (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-33a85eb7-4238-4ed0-8000-6c9d0bb12f10, org.label-schema.vendor=CentOS, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=b85d0548925081ae8c6bdd697658cec4, org.label-schema.license=GPLv2, tcib_managed=true, org.label-schema.build-date=20260127, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0)
Jan 30 20:18:27 compute-1 podman[266956]: 2026-01-30 20:18:27.265546957 +0000 UTC m=+0.053742423 container remove 4b436a221e2cbd07662d7ed336236a5b82411ca4b6cf1eb2a9cd2472b6deacbb (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-33a85eb7-4238-4ed0-8000-6c9d0bb12f10, tcib_managed=true, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, tcib_build_tag=b85d0548925081ae8c6bdd697658cec4, org.label-schema.build-date=20260127, maintainer=OpenStack Kubernetes Operator team, org.label-schema.vendor=CentOS)
Jan 30 20:19:56 compute-1 podman[267559]: 2026-01-30 20:19:56.328119219 +0000 UTC m=+0.083906267 container create 1716d37968352c0203286fc8a43824cdeea3ac700ae1587b961e853a638cb90c (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-b785f2d9-25d0-4cca-b23e-d2683fa2d149, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20260127, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, tcib_build_tag=b85d0548925081ae8c6bdd697658cec4, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image)
Jan 30 20:19:56 compute-1 podman[267559]: 2026-01-30 20:19:56.293592651 +0000 UTC m=+0.049379699 image pull 19964fda6b912d3d57e21b0bcc221725d936e513025030cb508474fe04b06af8 quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified
Jan 30 20:19:56 compute-1 podman[267559]: 2026-01-30 20:19:56.433270907 +0000 UTC m=+0.189057955 container init 1716d37968352c0203286fc8a43824cdeea3ac700ae1587b961e853a638cb90c (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-b785f2d9-25d0-4cca-b23e-d2683fa2d149, org.label-schema.build-date=20260127, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, tcib_build_tag=b85d0548925081ae8c6bdd697658cec4, tcib_managed=true, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0)
Jan 30 20:19:56 compute-1 podman[267559]: 2026-01-30 20:19:56.440528192 +0000 UTC m=+0.196315250 container start 1716d37968352c0203286fc8a43824cdeea3ac700ae1587b961e853a638cb90c (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-b785f2d9-25d0-4cca-b23e-d2683fa2d149, org.label-schema.schema-version=1.0, org.label-schema.build-date=20260127, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=b85d0548925081ae8c6bdd697658cec4, tcib_managed=true, maintainer=OpenStack Kubernetes Operator team, io.buildah.version=1.41.3, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS)
Jan 30 20:20:21 compute-1 podman[267696]: 2026-01-30 20:20:21.57347368 +0000 UTC m=+0.061943454 container died 1716d37968352c0203286fc8a43824cdeea3ac700ae1587b961e853a638cb90c (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-b785f2d9-25d0-4cca-b23e-d2683fa2d149, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=b85d0548925081ae8c6bdd697658cec4, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, org.label-schema.build-date=20260127, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true)
Jan 30 20:20:21 compute-1 podman[267696]: 2026-01-30 20:20:21.621727486 +0000 UTC m=+0.110197250 container cleanup 1716d37968352c0203286fc8a43824cdeea3ac700ae1587b961e853a638cb90c (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-b785f2d9-25d0-4cca-b23e-d2683fa2d149, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20260127, org.label-schema.schema-version=1.0, tcib_build_tag=b85d0548925081ae8c6bdd697658cec4, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true)
Jan 30 20:20:21 compute-1 podman[267741]: 2026-01-30 20:20:21.701020962 +0000 UTC m=+0.056977825 container remove 1716d37968352c0203286fc8a43824cdeea3ac700ae1587b961e853a638cb90c (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-b785f2d9-25d0-4cca-b23e-d2683fa2d149, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, tcib_build_tag=b85d0548925081ae8c6bdd697658cec4, tcib_managed=true, org.label-schema.build-date=20260127, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS)

● libpod-883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037.scope - libcrun container
     Loaded: loaded (/run/systemd/transient/libpod-883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037.scope; transient)
  Transient: yes
    Drop-In: /run/systemd/transient/libpod-883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037.scope.d
             └─dep.conf
     Active: active (running) since Fri 2026-01-30 19:32:55 UTC; 48min ago
         IO: 0B read, 4.0K written
      Tasks: 6 (limit: 4096)
     Memory: 17.8M (peak: 20.9M)
        CPU: 7.704s
     CGroup: /machine.slice/libpod-883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037.scope
             └─container
               ├─251554 dumb-init --single-child -- kolla_start
               └─251557 /usr/bin/ovn-controller --pidfile unix:/run/openvswitch/db.sock -p /etc/pki/tls/private/ovndb.key -c /etc/pki/tls/certs/ovndb.crt -C /etc/pki/tls/certs/ovndbca.crt

Jan 30 19:32:55 compute-1 systemd[1]: Started libcrun container.

● libpod-bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98.scope - libcrun container
     Loaded: loaded (/run/systemd/transient/libpod-bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 17:43:54 UTC; 2h 37min ago
         IO: 0B read, 0B written
      Tasks: 7 (limit: 4096)
     Memory: 10.4M (peak: 12.5M)
        CPU: 3.264s
     CGroup: /machine.slice/libpod-bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98.scope
             └─container
               └─199285 /bin/podman_exporter --web.config.file=/etc/podman_exporter/podman_exporter.yaml

Jan 30 17:43:54 compute-1 systemd[1]: Started libcrun container.

● libpod-cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18.scope - libcrun container
     Loaded: loaded (/run/systemd/transient/libpod-cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 17:59:47 UTC; 2h 21min ago
         IO: 0B read, 4.0K written
      Tasks: 10 (limit: 4096)
     Memory: 8.5M (peak: 9.2M)
        CPU: 159ms
     CGroup: /machine.slice/libpod-cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18.scope
             └─container
               ├─216789 dumb-init --single-child -- /bin/bash -c "exec /usr/sbin/haproxy -Ws -f /var/lib/neutron/ovn-metadata-proxy/20be9363-8d31-4010-8379-2f4db75ec5ee.conf"
               ├─216791 /usr/sbin/haproxy -Ws -f /var/lib/neutron/ovn-metadata-proxy/20be9363-8d31-4010-8379-2f4db75ec5ee.conf
               └─216793 /usr/sbin/haproxy -Ws -f /var/lib/neutron/ovn-metadata-proxy/20be9363-8d31-4010-8379-2f4db75ec5ee.conf

Jan 30 17:59:47 compute-1 systemd[1]: Started libcrun container.

● libpod-conmon-cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18.scope
     Loaded: loaded (/run/systemd/transient/libpod-conmon-cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 17:59:47 UTC; 2h 21min ago
         IO: 0B read, 0B written
      Tasks: 1 (limit: 100092)
     Memory: 340.0K (peak: 1.9M)
        CPU: 18ms
     CGroup: /machine.slice/libpod-conmon-cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18.scope
             └─216787 /usr/bin/conmon --api-version 1 -c cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18 -u cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18/userdata -p /run/containers/storage/overlay-containers/cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18/userdata/pidfile -n neutron-haproxy-ovnmeta-20be9363-8d31-4010-8379-2f4db75ec5ee --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18 --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18/userdata/oci-log --conmon-pidfile /run/containers/storage/overlay-containers/cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg 
overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18

Jan 30 17:59:47 compute-1 systemd[1]: Started libpod-conmon-cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18.scope.
Jan 30 17:59:47 compute-1 neutron-haproxy-ovnmeta-20be9363-8d31-4010-8379-2f4db75ec5ee[216787]: [NOTICE]   (216791) : New worker (216793) forked
Jan 30 17:59:47 compute-1 neutron-haproxy-ovnmeta-20be9363-8d31-4010-8379-2f4db75ec5ee[216787]: [NOTICE]   (216791) : Loading success.

● libpod-conmon-ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab.scope
     Loaded: loaded (/run/systemd/transient/libpod-conmon-ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 17:59:48 UTC; 2h 21min ago
         IO: 0B read, 0B written
      Tasks: 1 (limit: 100092)
     Memory: 324.0K (peak: 1.7M)
        CPU: 11ms
     CGroup: /machine.slice/libpod-conmon-ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab.scope
             └─216861 /usr/bin/conmon --api-version 1 -c ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab -u ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab/userdata -p /run/containers/storage/overlay-containers/ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab/userdata/pidfile -n neutron-haproxy-ovnmeta-311f9b45-49be-4345-9ac8-a1fe5a0b8a53 --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab/userdata/oci-log --conmon-pidfile /run/containers/storage/overlay-containers/ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg 
overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab

Jan 30 17:59:48 compute-1 systemd[1]: Started libpod-conmon-ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab.scope.
Jan 30 17:59:48 compute-1 neutron-haproxy-ovnmeta-311f9b45-49be-4345-9ac8-a1fe5a0b8a53[216861]: [NOTICE]   (216866) : New worker (216868) forked
Jan 30 17:59:48 compute-1 neutron-haproxy-ovnmeta-311f9b45-49be-4345-9ac8-a1fe5a0b8a53[216861]: [NOTICE]   (216866) : Loading success.

● libpod-ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab.scope - libcrun container
     Loaded: loaded (/run/systemd/transient/libpod-ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 17:59:48 UTC; 2h 21min ago
         IO: 0B read, 4.0K written
      Tasks: 10 (limit: 4096)
     Memory: 8.7M (peak: 10.5M)
        CPU: 143ms
     CGroup: /machine.slice/libpod-ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab.scope
             └─container
               ├─216863 dumb-init --single-child -- /bin/bash -c "exec /usr/sbin/haproxy -Ws -f /var/lib/neutron/ovn-metadata-proxy/311f9b45-49be-4345-9ac8-a1fe5a0b8a53.conf"
               ├─216866 /usr/sbin/haproxy -Ws -f /var/lib/neutron/ovn-metadata-proxy/311f9b45-49be-4345-9ac8-a1fe5a0b8a53.conf
               └─216868 /usr/sbin/haproxy -Ws -f /var/lib/neutron/ovn-metadata-proxy/311f9b45-49be-4345-9ac8-a1fe5a0b8a53.conf

Jan 30 17:59:48 compute-1 systemd[1]: Started libcrun container.

● libpod-e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a.scope - libcrun container
     Loaded: loaded (/run/systemd/transient/libpod-e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 17:44:14 UTC; 2h 37min ago
         IO: 0B read, 0B written
      Tasks: 6 (limit: 4096)
     Memory: 5.2M (peak: 7.2M)
        CPU: 4.362s
     CGroup: /machine.slice/libpod-e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a.scope
             └─container
               └─202410 /app/openstack-network-exporter

Jan 30 17:44:14 compute-1 systemd[1]: Started libcrun container.

● machine-qemu\x2d7\x2dinstance\x2d0000000e.scope - Virtual Machine qemu-7-instance-0000000e
     Loaded: loaded (/run/systemd/transient/machine-qemu\x2d7\x2dinstance\x2d0000000e.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 17:59:45 UTC; 2h 21min ago
         IO: 23.7M read, 28.3M written
      Tasks: 8 (limit: 16384)
     Memory: 254.7M (peak: 255.4M)
        CPU: 6min 13.174s
     CGroup: /machine.slice/machine-qemu\x2d7\x2dinstance\x2d0000000e.scope
             └─libvirt
               └─216693 /usr/libexec/qemu-kvm -name guest=instance-0000000e,debug-threads=on -S -object "{\"qom-type\":\"secret\",\"id\":\"masterKey0\",\"format\":\"raw\",\"file\":\"/var/lib/libvirt/qemu/domain-7-instance-0000000e/master-key.aes\"}" -machine pc-q35-rhel9.8.0,usb=off,dump-guest-core=off,memory-backend=pc.ram,hpet=off,acpi=on -accel kvm -cpu EPYC-Rome,x2apic=on,tsc-deadline=on,hypervisor=on,tsc-adjust=on,spec-ctrl=on,stibp=on,ssbd=on,cmp-legacy=on,overflow-recov=on,succor=on,ibrs=on,amd-ssbd=on,virt-ssbd=on,lbrv=on,tsc-scale=on,vmcb-clean=on,flushbyasid=on,pause-filter=on,pfthreshold=on,svme-addr-chk=on,lfence-always-serializing=on,xsaves=off -m size=131072k -object "{\"qom-type\":\"memory-backend-ram\",\"id\":\"pc.ram\",\"size\":134217728}" -overcommit mem-lock=off -smp 1,sockets=1,dies=1,clusters=1,cores=1,threads=1 -uuid 79f0f99a-6f26-49ed-ac15-d149313db321 -smbios "type=1,manufacturer=RDO,product=OpenStack Compute,version=27.5.2-0.20260127144738.eaa65f0.el9,serial=79f0f99a-6f26-49ed-ac15-d149313db321,uuid=79f0f99a-6f26-49ed-ac15-d149313db321,family=Virtual Machine" -no-user-config -nodefaults -chardev socket,id=charmonitor,fd=28,server=on,wait=off -mon chardev=charmonitor,id=monitor,mode=control -rtc base=utc,driftfix=slew -global kvm-pit.lost_tick_policy=delay -no-shutdown -boot strict=on -device "{\"driver\":\"pcie-root-port\",\"port\":16,\"chassis\":1,\"id\":\"pci.1\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":17,\"chassis\":2,\"id\":\"pci.2\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":18,\"chassis\":3,\"id\":\"pci.3\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":19,\"chassis\":4,\"id\":\"pci.4\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":20,\"chassis\":5,\"id\":\"pci.5\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x4\"}" -device 
"{\"driver\":\"pcie-root-port\",\"port\":21,\"chassis\":6,\"id\":\"pci.6\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":22,\"chassis\":7,\"id\":\"pci.7\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":23,\"chassis\":8,\"id\":\"pci.8\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":24,\"chassis\":9,\"id\":\"pci.9\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":25,\"chassis\":10,\"id\":\"pci.10\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":26,\"chassis\":11,\"id\":\"pci.11\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":27,\"chassis\":12,\"id\":\"pci.12\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":28,\"chassis\":13,\"id\":\"pci.13\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":29,\"chassis\":14,\"id\":\"pci.14\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":30,\"chassis\":15,\"id\":\"pci.15\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":31,\"chassis\":16,\"id\":\"pci.16\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":32,\"chassis\":17,\"id\":\"pci.17\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":33,\"chassis\":18,\"id\":\"pci.18\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":34,\"chassis\":19,\"id\":\"pci.19\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":35,\"chassis\":20,\"id\":\"pci.20\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x3\"}" -device 
"{\"driver\":\"pcie-root-port\",\"port\":36,\"chassis\":21,\"id\":\"pci.21\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":37,\"chassis\":22,\"id\":\"pci.22\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":38,\"chassis\":23,\"id\":\"pci.23\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":39,\"chassis\":24,\"id\":\"pci.24\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":40,\"chassis\":25,\"id\":\"pci.25\",\"bus\":\"pcie.0\",\"addr\":\"0x5\"}" -device "{\"driver\":\"pcie-pci-bridge\",\"id\":\"pci.26\",\"bus\":\"pci.1\",\"addr\":\"0x0\"}" -device "{\"driver\":\"piix3-usb-uhci\",\"id\":\"usb\",\"bus\":\"pci.26\",\"addr\":\"0x1\"}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/_base/d8b1339cb6a58d9c630b446503c76b090a4f368e\",\"node-name\":\"libvirt-3-storage\",\"read-only\":true,\"cache\":{\"direct\":true,\"no-flush\":false}}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/79f0f99a-6f26-49ed-ac15-d149313db321/disk\",\"node-name\":\"libvirt-2-storage\",\"auto-read-only\":true,\"discard\":\"unmap\",\"cache\":{\"direct\":true,\"no-flush\":false}}" -blockdev "{\"node-name\":\"libvirt-2-format\",\"read-only\":false,\"cache\":{\"direct\":true,\"no-flush\":false},\"driver\":\"qcow2\",\"file\":\"libvirt-2-storage\",\"backing\":\"libvirt-3-storage\"}" -device "{\"driver\":\"virtio-blk-pci\",\"bus\":\"pci.4\",\"addr\":\"0x0\",\"drive\":\"libvirt-2-format\",\"id\":\"virtio-disk0\",\"bootindex\":1,\"write-cache\":\"on\"}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/79f0f99a-6f26-49ed-ac15-d149313db321/disk.config\",\"node-name\":\"libvirt-1-storage\",\"read-only\":true,\"cache\":{\"direct\":true,\"no-flush\":false}}" -device 
"{\"driver\":\"ide-cd\",\"bus\":\"ide.0\",\"drive\":\"libvirt-1-storage\",\"id\":\"sata0-0-0\",\"write-cache\":\"on\"}" -netdev "{\"type\":\"tap\",\"fd\":\"32\",\"vhost\":true,\"vhostfd\":\"37\",\"id\":\"hostnet0\"}" -device "{\"driver\":\"virtio-net-pci\",\"rx_queue_size\":512,\"host_mtu\":1342,\"netdev\":\"hostnet0\",\"id\":\"net0\",\"mac\":\"fa:16:3e:28:e9:1d\",\"bus\":\"pci.2\",\"addr\":\"0x0\"}" -netdev "{\"type\":\"tap\",\"fd\":\"38\",\"vhost\":true,\"vhostfd\":\"39\",\"id\":\"hostnet1\"}" -device "{\"driver\":\"virtio-net-pci\",\"rx_queue_size\":512,\"host_mtu\":1342,\"netdev\":\"hostnet1\",\"id\":\"net1\",\"mac\":\"fa:16:3e:0b:98:69\",\"bus\":\"pci.3\",\"addr\":\"0x0\"}" -add-fd set=0,fd=30,opaque=serial0-log -chardev pty,id=charserial0,logfile=/dev/fdset/0,logappend=on -device "{\"driver\":\"isa-serial\",\"chardev\":\"charserial0\",\"id\":\"serial0\",\"index\":0}" -device "{\"driver\":\"usb-tablet\",\"id\":\"input0\",\"bus\":\"usb.0\",\"port\":\"1\"}" -audiodev "{\"id\":\"audio1\",\"driver\":\"none\"}" -object "{\"qom-type\":\"tls-creds-x509\",\"id\":\"vnc-tls-creds0\",\"dir\":\"/etc/pki/qemu\",\"endpoint\":\"server\",\"verify-peer\":true}" -vnc "[::0]:0,tls-creds=vnc-tls-creds0,audiodev=audio1" -device "{\"driver\":\"virtio-vga\",\"id\":\"video0\",\"max_outputs\":1,\"bus\":\"pcie.0\",\"addr\":\"0x1\"}" -global ICH9-LPC.noreboot=off -watchdog-action reset -device "{\"driver\":\"virtio-balloon-pci\",\"id\":\"balloon0\",\"bus\":\"pci.5\",\"addr\":\"0x0\"}" -object "{\"qom-type\":\"rng-random\",\"id\":\"objrng0\",\"filename\":\"/dev/urandom\"}" -device "{\"driver\":\"virtio-rng-pci\",\"rng\":\"objrng0\",\"id\":\"rng0\",\"bus\":\"pci.6\",\"addr\":\"0x0\"}" -device "{\"driver\":\"vmcoreinfo\"}" -sandbox on,obsolete=deny,elevateprivileges=deny,spawn=deny,resourcecontrol=deny -msg timestamp=on

Jan 30 17:59:45 compute-1 systemd[1]: Started Virtual Machine qemu-7-instance-0000000e.

● machine-qemu\x2d8\x2dinstance\x2d00000010.scope - Virtual Machine qemu-8-instance-00000010
     Loaded: loaded (/run/systemd/transient/machine-qemu\x2d8\x2dinstance\x2d00000010.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 18:01:13 UTC; 2h 20min ago
         IO: 24.9M read, 28.3M written
      Tasks: 8 (limit: 16384)
     Memory: 254.6M (peak: 255.1M)
        CPU: 6min 11.247s
     CGroup: /machine.slice/machine-qemu\x2d8\x2dinstance\x2d00000010.scope
             └─libvirt
               └─217311 /usr/libexec/qemu-kvm -name guest=instance-00000010,debug-threads=on -S -object "{\"qom-type\":\"secret\",\"id\":\"masterKey0\",\"format\":\"raw\",\"file\":\"/var/lib/libvirt/qemu/domain-8-instance-00000010/master-key.aes\"}" -machine pc-q35-rhel9.8.0,usb=off,dump-guest-core=off,memory-backend=pc.ram,hpet=off,acpi=on -accel kvm -cpu EPYC-Rome,x2apic=on,tsc-deadline=on,hypervisor=on,tsc-adjust=on,spec-ctrl=on,stibp=on,ssbd=on,cmp-legacy=on,overflow-recov=on,succor=on,ibrs=on,amd-ssbd=on,virt-ssbd=on,lbrv=on,tsc-scale=on,vmcb-clean=on,flushbyasid=on,pause-filter=on,pfthreshold=on,svme-addr-chk=on,lfence-always-serializing=on,xsaves=off -m size=131072k -object "{\"qom-type\":\"memory-backend-ram\",\"id\":\"pc.ram\",\"size\":134217728}" -overcommit mem-lock=off -smp 1,sockets=1,dies=1,clusters=1,cores=1,threads=1 -uuid bdd94295-7a8b-44b3-91c8-846d36f784a0 -smbios "type=1,manufacturer=RDO,product=OpenStack Compute,version=27.5.2-0.20260127144738.eaa65f0.el9,serial=bdd94295-7a8b-44b3-91c8-846d36f784a0,uuid=bdd94295-7a8b-44b3-91c8-846d36f784a0,family=Virtual Machine" -no-user-config -nodefaults -chardev socket,id=charmonitor,fd=34,server=on,wait=off -mon chardev=charmonitor,id=monitor,mode=control -rtc base=utc,driftfix=slew -global kvm-pit.lost_tick_policy=delay -no-shutdown -boot strict=on -device "{\"driver\":\"pcie-root-port\",\"port\":16,\"chassis\":1,\"id\":\"pci.1\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":17,\"chassis\":2,\"id\":\"pci.2\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":18,\"chassis\":3,\"id\":\"pci.3\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":19,\"chassis\":4,\"id\":\"pci.4\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":20,\"chassis\":5,\"id\":\"pci.5\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x4\"}" -device 
"{\"driver\":\"pcie-root-port\",\"port\":21,\"chassis\":6,\"id\":\"pci.6\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":22,\"chassis\":7,\"id\":\"pci.7\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":23,\"chassis\":8,\"id\":\"pci.8\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":24,\"chassis\":9,\"id\":\"pci.9\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":25,\"chassis\":10,\"id\":\"pci.10\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":26,\"chassis\":11,\"id\":\"pci.11\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":27,\"chassis\":12,\"id\":\"pci.12\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":28,\"chassis\":13,\"id\":\"pci.13\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":29,\"chassis\":14,\"id\":\"pci.14\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":30,\"chassis\":15,\"id\":\"pci.15\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":31,\"chassis\":16,\"id\":\"pci.16\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":32,\"chassis\":17,\"id\":\"pci.17\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":33,\"chassis\":18,\"id\":\"pci.18\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":34,\"chassis\":19,\"id\":\"pci.19\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":35,\"chassis\":20,\"id\":\"pci.20\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x3\"}" -device 
"{\"driver\":\"pcie-root-port\",\"port\":36,\"chassis\":21,\"id\":\"pci.21\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":37,\"chassis\":22,\"id\":\"pci.22\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":38,\"chassis\":23,\"id\":\"pci.23\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":39,\"chassis\":24,\"id\":\"pci.24\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":40,\"chassis\":25,\"id\":\"pci.25\",\"bus\":\"pcie.0\",\"addr\":\"0x5\"}" -device "{\"driver\":\"pcie-pci-bridge\",\"id\":\"pci.26\",\"bus\":\"pci.1\",\"addr\":\"0x0\"}" -device "{\"driver\":\"piix3-usb-uhci\",\"id\":\"usb\",\"bus\":\"pci.26\",\"addr\":\"0x1\"}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/_base/d8b1339cb6a58d9c630b446503c76b090a4f368e\",\"node-name\":\"libvirt-3-storage\",\"read-only\":true,\"cache\":{\"direct\":true,\"no-flush\":false}}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/bdd94295-7a8b-44b3-91c8-846d36f784a0/disk\",\"node-name\":\"libvirt-2-storage\",\"auto-read-only\":true,\"discard\":\"unmap\",\"cache\":{\"direct\":true,\"no-flush\":false}}" -blockdev "{\"node-name\":\"libvirt-2-format\",\"read-only\":false,\"cache\":{\"direct\":true,\"no-flush\":false},\"driver\":\"qcow2\",\"file\":\"libvirt-2-storage\",\"backing\":\"libvirt-3-storage\"}" -device "{\"driver\":\"virtio-blk-pci\",\"bus\":\"pci.4\",\"addr\":\"0x0\",\"drive\":\"libvirt-2-format\",\"id\":\"virtio-disk0\",\"bootindex\":1,\"write-cache\":\"on\"}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/bdd94295-7a8b-44b3-91c8-846d36f784a0/disk.config\",\"node-name\":\"libvirt-1-storage\",\"read-only\":true,\"cache\":{\"direct\":true,\"no-flush\":false}}" -device 
"{\"driver\":\"ide-cd\",\"bus\":\"ide.0\",\"drive\":\"libvirt-1-storage\",\"id\":\"sata0-0-0\",\"write-cache\":\"on\"}" -netdev "{\"type\":\"tap\",\"fd\":\"39\",\"vhost\":true,\"vhostfd\":\"41\",\"id\":\"hostnet0\"}" -device "{\"driver\":\"virtio-net-pci\",\"rx_queue_size\":512,\"host_mtu\":1342,\"netdev\":\"hostnet0\",\"id\":\"net0\",\"mac\":\"fa:16:3e:16:12:2b\",\"bus\":\"pci.2\",\"addr\":\"0x0\"}" -netdev "{\"type\":\"tap\",\"fd\":\"42\",\"vhost\":true,\"vhostfd\":\"47\",\"id\":\"hostnet1\"}" -device "{\"driver\":\"virtio-net-pci\",\"rx_queue_size\":512,\"host_mtu\":1342,\"netdev\":\"hostnet1\",\"id\":\"net1\",\"mac\":\"fa:16:3e:9a:66:89\",\"bus\":\"pci.3\",\"addr\":\"0x0\"}" -add-fd set=0,fd=38,opaque=serial0-log -chardev pty,id=charserial0,logfile=/dev/fdset/0,logappend=on -device "{\"driver\":\"isa-serial\",\"chardev\":\"charserial0\",\"id\":\"serial0\",\"index\":0}" -device "{\"driver\":\"usb-tablet\",\"id\":\"input0\",\"bus\":\"usb.0\",\"port\":\"1\"}" -audiodev "{\"id\":\"audio1\",\"driver\":\"none\"}" -object "{\"qom-type\":\"tls-creds-x509\",\"id\":\"vnc-tls-creds0\",\"dir\":\"/etc/pki/qemu\",\"endpoint\":\"server\",\"verify-peer\":true}" -vnc "[::0]:1,tls-creds=vnc-tls-creds0,audiodev=audio1" -device "{\"driver\":\"virtio-vga\",\"id\":\"video0\",\"max_outputs\":1,\"bus\":\"pcie.0\",\"addr\":\"0x1\"}" -global ICH9-LPC.noreboot=off -watchdog-action reset -device "{\"driver\":\"virtio-balloon-pci\",\"id\":\"balloon0\",\"bus\":\"pci.5\",\"addr\":\"0x0\"}" -object "{\"qom-type\":\"rng-random\",\"id\":\"objrng0\",\"filename\":\"/dev/urandom\"}" -device "{\"driver\":\"virtio-rng-pci\",\"rng\":\"objrng0\",\"id\":\"rng0\",\"bus\":\"pci.6\",\"addr\":\"0x0\"}" -device "{\"driver\":\"vmcoreinfo\"}" -sandbox on,obsolete=deny,elevateprivileges=deny,spawn=deny,resourcecontrol=deny -msg timestamp=on

Jan 30 18:01:13 compute-1 systemd[1]: Started Virtual Machine qemu-8-instance-00000010.

● session-1.scope - Session 1 of User zuul
     Loaded: loaded (/run/systemd/transient/session-1.scope; transient)
  Transient: yes
     Active: active (abandoned) since Fri 2026-01-30 16:50:03 UTC; 3h 31min ago
         IO: 0B read, 0B written
      Tasks: 1
     Memory: 17.1M (peak: 39.3M)
        CPU: 1min 4.747s
     CGroup: /user.slice/user-1000.slice/session-1.scope
             └─4523 /usr/bin/python3

Jan 30 16:50:44 np0005602931.novalocal sudo[6870]: pam_unix(sudo:session): session closed for user root
Jan 30 16:50:45 np0005602931.novalocal python3[6900]: ansible-ansible.legacy.command Invoked with executable=/bin/bash _raw_params=env
                                                       _uses_shell=True zuul_log_id=fa163ef9-e89a-b421-de47-00000000001f-1-compute1 zuul_ansible_split_streams=False warn=False stdin_add_newline=True strip_empty_ends=True argv=None chdir=None creates=None removes=None stdin=None
Jan 30 16:50:46 np0005602931.novalocal python3[6928]: ansible-file Invoked with path=/home/zuul/workspace state=directory recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 30 16:51:09 np0005602931.novalocal sudo[6952]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-omjggjyiinvedjbwnpebfbwajkdxibij ; /usr/bin/python3'
Jan 30 16:51:09 np0005602931.novalocal sudo[6952]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 16:51:09 np0005602931.novalocal python3[6954]: ansible-ansible.builtin.file Invoked with path=/etc/ci/env state=directory mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None
Jan 30 16:51:09 np0005602931.novalocal sudo[6952]: pam_unix(sudo:session): session closed for user root
Jan 30 16:52:09 np0005602931.novalocal sshd-session[4322]: Received disconnect from 38.102.83.114 port 58930:11: disconnected by user
Jan 30 16:52:09 np0005602931.novalocal sshd-session[4322]: Disconnected from user zuul 38.102.83.114 port 58930
Jan 30 16:52:09 np0005602931.novalocal sshd-session[4308]: pam_unix(sshd:session): session closed for user zuul

● session-100.scope - Session 100 of User zuul
     Loaded: loaded (/run/systemd/transient/session-100.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:24:05 UTC; 57min ago
         IO: 0B read, 8.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.8M)
        CPU: 81ms
     CGroup: /user.slice/user-1000.slice/session-100.scope
             ├─247036 "sshd-session: zuul [priv]"
             └─247039 "sshd-session: zuul@notty"

Jan 30 19:24:05 compute-1 systemd[1]: Started Session 100 of User zuul.
Jan 30 19:24:06 compute-1 sudo[247069]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni genev_sys_6081 'icmp and ((ether host fa:16:3e:f6:73:e9 and ether host fa:16:3e:4f:2c:b2) or (ether host fa:16:3e:67:59:a6 and ether host fa:16:3e:0f:28:e8))' -w /tmp/tmp.1Mn6EOA6PB
Jan 30 19:24:06 compute-1 sudo[247069]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:26:06 compute-1 sudo[247069]: pam_unix(sudo:session): session closed for user root

● session-102.scope - Session 102 of User zuul
     Loaded: loaded (/run/systemd/transient/session-102.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:24:14 UTC; 57min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 4.1M)
        CPU: 60ms
     CGroup: /user.slice/user-1000.slice/session-102.scope
             ├─247152 "sshd-session: zuul [priv]"
             └─247155 "sshd-session: zuul@notty"

Jan 30 19:24:14 compute-1 systemd[1]: Started Session 102 of User zuul.
Jan 30 19:24:14 compute-1 sudo[247156]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.1Mn6EOA6PB
Jan 30 19:24:14 compute-1 sudo[247156]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:24:14 compute-1 sudo[247156]: pam_unix(sudo:session): session closed for user root

● session-105.scope - Session 105 of User zuul
     Loaded: loaded (/run/systemd/transient/session-105.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:24:40 UTC; 56min ago
         IO: 0B read, 4.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.7M)
        CPU: 98ms
     CGroup: /user.slice/user-1000.slice/session-105.scope
             ├─247407 "sshd-session: zuul [priv]"
             └─247410 "sshd-session: zuul@notty"

Jan 30 19:24:40 compute-1 systemd[1]: Started Session 105 of User zuul.
Jan 30 19:24:41 compute-1 sudo[247440]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni eth1 icmp and ether host fa:16:3e:4e:e9:19 -w /tmp/tmp.P2EdHoBkAz
Jan 30 19:24:41 compute-1 sudo[247440]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:26:41 compute-1 sudo[247440]: pam_unix(sudo:session): session closed for user root

● session-107.scope - Session 107 of User zuul
     Loaded: loaded (/run/systemd/transient/session-107.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:24:49 UTC; 56min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.1M (peak: 4.1M)
        CPU: 85ms
     CGroup: /user.slice/user-1000.slice/session-107.scope
             ├─247511 "sshd-session: zuul [priv]"
             └─247514 "sshd-session: zuul@notty"

Jan 30 19:24:49 compute-1 systemd[1]: Started Session 107 of User zuul.
Jan 30 19:24:49 compute-1 sudo[247515]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.P2EdHoBkAz
Jan 30 19:24:49 compute-1 sudo[247515]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:24:49 compute-1 sudo[247515]: pam_unix(sudo:session): session closed for user root

● session-108.scope - Session 108 of User zuul
     Loaded: loaded (/run/systemd/transient/session-108.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:25:13 UTC; 56min ago
         IO: 0B read, 4.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.8M)
        CPU: 91ms
     CGroup: /user.slice/user-1000.slice/session-108.scope
             ├─247824 "sshd-session: zuul [priv]"
             └─247827 "sshd-session: zuul@notty"

Jan 30 19:25:13 compute-1 systemd[1]: Started Session 108 of User zuul.
Jan 30 19:25:13 compute-1 sudo[247901]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni eth1 icmp and ether host fa:16:3e:a8:68:85 -w /tmp/tmp.TdM5x5PM1K
Jan 30 19:25:13 compute-1 sudo[247901]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:27:13 compute-1 sudo[247901]: pam_unix(sudo:session): session closed for user root

● session-110.scope - Session 110 of User zuul
     Loaded: loaded (/run/systemd/transient/session-110.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:25:22 UTC; 56min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 4.1M)
        CPU: 62ms
     CGroup: /user.slice/user-1000.slice/session-110.scope
             ├─247944 "sshd-session: zuul [priv]"
             └─247977 "sshd-session: zuul@notty"

Jan 30 19:25:22 compute-1 systemd[1]: Started Session 110 of User zuul.
Jan 30 19:25:22 compute-1 sudo[247978]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.TdM5x5PM1K
Jan 30 19:25:22 compute-1 sudo[247978]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:25:22 compute-1 sudo[247978]: pam_unix(sudo:session): session closed for user root

● session-111.scope - Session 111 of User zuul
     Loaded: loaded (/run/systemd/transient/session-111.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:25:24 UTC; 56min ago
         IO: 0B read, 4.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.7M)
        CPU: 99ms
     CGroup: /user.slice/user-1000.slice/session-111.scope
             ├─248004 "sshd-session: zuul [priv]"
             └─248007 "sshd-session: zuul@notty"

Jan 30 19:25:24 compute-1 systemd[1]: Started Session 111 of User zuul.
Jan 30 19:25:24 compute-1 sudo[248037]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni genev_sys_6081 'icmp and ((ether host fa:16:3e:f6:73:e9 and ether host fa:16:3e:4f:2c:b2) or (ether host fa:16:3e:67:59:a6 and ether host fa:16:3e:0f:28:e8))' -w /tmp/tmp.1x2cxvg40X
Jan 30 19:25:24 compute-1 sudo[248037]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:27:24 compute-1 sudo[248037]: pam_unix(sudo:session): session closed for user root

● session-113.scope - Session 113 of User zuul
     Loaded: loaded (/run/systemd/transient/session-113.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:25:33 UTC; 55min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 4.2M)
        CPU: 91ms
     CGroup: /user.slice/user-1000.slice/session-113.scope
             ├─248105 "sshd-session: zuul [priv]"
             └─248108 "sshd-session: zuul@notty"

Jan 30 19:25:33 compute-1 systemd[1]: Started Session 113 of User zuul.
Jan 30 19:25:33 compute-1 sudo[248109]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.1x2cxvg40X
Jan 30 19:25:33 compute-1 sudo[248109]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:25:33 compute-1 sudo[248109]: pam_unix(sudo:session): session closed for user root

● session-120.scope - Session 120 of User zuul
     Loaded: loaded (/run/systemd/transient/session-120.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:27:13 UTC; 54min ago
         IO: 0B read, 12.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.7M)
        CPU: 119ms
     CGroup: /user.slice/user-1000.slice/session-120.scope
             ├─248982 "sshd-session: zuul [priv]"
             └─248985 "sshd-session: zuul@notty"

Jan 30 19:27:13 compute-1 systemd[1]: Started Session 120 of User zuul.
Jan 30 19:27:14 compute-1 sudo[249015]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni eth1 icmp and ether host fa:16:3e:aa:8e:98 -w /tmp/tmp.Y9u4QBUe4O
Jan 30 19:27:14 compute-1 sudo[249015]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:29:14 compute-1 sudo[249015]: pam_unix(sudo:session): session closed for user root

● session-122.scope - Session 122 of User zuul
     Loaded: loaded (/run/systemd/transient/session-122.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:27:35 UTC; 53min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 3.9M)
        CPU: 101ms
     CGroup: /user.slice/user-1000.slice/session-122.scope
             ├─249152 "sshd-session: zuul [priv]"
             └─249196 "sshd-session: zuul@notty"

Jan 30 19:27:35 compute-1 systemd[1]: Started Session 122 of User zuul.
Jan 30 19:27:35 compute-1 sudo[249201]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.Y9u4QBUe4O
Jan 30 19:27:35 compute-1 sudo[249201]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:27:35 compute-1 sudo[249201]: pam_unix(sudo:session): session closed for user root

● session-124.scope - Session 124 of User zuul
     Loaded: loaded (/run/systemd/transient/session-124.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:28:20 UTC; 53min ago
         IO: 0B read, 4.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.7M)
        CPU: 79ms
     CGroup: /user.slice/user-1000.slice/session-124.scope
             ├─249529 "sshd-session: zuul [priv]"
             └─249532 "sshd-session: zuul@notty"

Jan 30 19:28:20 compute-1 systemd[1]: Started Session 124 of User zuul.
Jan 30 19:28:21 compute-1 sudo[249562]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni eth1 icmp and ether host fa:16:3e:09:cc:a5 -w /tmp/tmp.9TSfu2ZSdc
Jan 30 19:28:21 compute-1 sudo[249562]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:30:21 compute-1 sudo[249562]: pam_unix(sudo:session): session closed for user root

● session-126.scope - Session 126 of User zuul
     Loaded: loaded (/run/systemd/transient/session-126.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:28:42 UTC; 52min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.1M (peak: 4.0M)
        CPU: 79ms
     CGroup: /user.slice/user-1000.slice/session-126.scope
             ├─249678 "sshd-session: zuul [priv]"
             └─249681 "sshd-session: zuul@notty"

Jan 30 19:28:42 compute-1 systemd[1]: Started Session 126 of User zuul.
Jan 30 19:28:42 compute-1 sudo[249682]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.9TSfu2ZSdc
Jan 30 19:28:42 compute-1 sudo[249682]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:28:42 compute-1 sudo[249682]: pam_unix(sudo:session): session closed for user root

● session-128.scope - Session 128 of User zuul
     Loaded: loaded (/run/systemd/transient/session-128.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:29:31 UTC; 51min ago
         IO: 0B read, 4.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.7M)
        CPU: 98ms
     CGroup: /user.slice/user-1000.slice/session-128.scope
             ├─249968 "sshd-session: zuul [priv]"
             └─249971 "sshd-session: zuul@notty"

Jan 30 19:29:31 compute-1 systemd[1]: Started Session 128 of User zuul.
Jan 30 19:29:32 compute-1 sudo[250001]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni eth1 icmp and ether host fa:16:3e:5e:d4:c0 -w /tmp/tmp.KRPx4VIFIo
Jan 30 19:29:32 compute-1 sudo[250001]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:31:32 compute-1 sudo[250001]: pam_unix(sudo:session): session closed for user root

● session-130.scope - Session 130 of User zuul
     Loaded: loaded (/run/systemd/transient/session-130.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:29:41 UTC; 51min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 4.4M)
        CPU: 74ms
     CGroup: /user.slice/user-1000.slice/session-130.scope
             ├─250070 "sshd-session: zuul [priv]"
             └─250073 "sshd-session: zuul@notty"

Jan 30 19:29:41 compute-1 systemd[1]: Started Session 130 of User zuul.
Jan 30 19:29:41 compute-1 sudo[250074]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.KRPx4VIFIo
Jan 30 19:29:41 compute-1 sudo[250074]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:29:41 compute-1 sudo[250074]: pam_unix(sudo:session): session closed for user root

● session-131.scope - Session 131 of User zuul
     Loaded: loaded (/run/systemd/transient/session-131.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:29:50 UTC; 51min ago
         IO: 0B read, 4.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.7M)
        CPU: 82ms
     CGroup: /user.slice/user-1000.slice/session-131.scope
             ├─250101 "sshd-session: zuul [priv]"
             └─250104 "sshd-session: zuul@notty"

Jan 30 19:29:50 compute-1 systemd[1]: Started Session 131 of User zuul.
Jan 30 19:29:50 compute-1 sudo[250134]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni eth1 icmp and ether host fa:16:3e:7b:15:c5 -w /tmp/tmp.MTEq2557gG
Jan 30 19:29:50 compute-1 sudo[250134]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:31:51 compute-1 sudo[250134]: pam_unix(sudo:session): session closed for user root

● session-133.scope - Session 133 of User zuul
     Loaded: loaded (/run/systemd/transient/session-133.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:29:59 UTC; 51min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 4.1M)
        CPU: 95ms
     CGroup: /user.slice/user-1000.slice/session-133.scope
             ├─250202 "sshd-session: zuul [priv]"
             └─250205 "sshd-session: zuul@notty"

Jan 30 19:29:59 compute-1 systemd[1]: Started Session 133 of User zuul.
Jan 30 19:30:00 compute-1 sudo[250206]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.MTEq2557gG
Jan 30 19:30:00 compute-1 sudo[250206]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:30:00 compute-1 sudo[250206]: pam_unix(sudo:session): session closed for user root

● session-134.scope - Session 134 of User zuul
     Loaded: loaded (/run/systemd/transient/session-134.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:30:09 UTC; 51min ago
         IO: 0B read, 4.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.7M)
        CPU: 102ms
     CGroup: /user.slice/user-1000.slice/session-134.scope
             ├─250290 "sshd-session: zuul [priv]"
             └─250312 "sshd-session: zuul@notty"

Jan 30 19:30:09 compute-1 systemd[1]: Started Session 134 of User zuul.
Jan 30 19:30:10 compute-1 sudo[250367]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni eth1 icmp and ether host fa:16:3e:7b:15:c5 -w /tmp/tmp.MZTpXwPILJ
Jan 30 19:30:10 compute-1 sudo[250367]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:32:10 compute-1 sudo[250367]: pam_unix(sudo:session): session closed for user root

● session-136.scope - Session 136 of User zuul
     Loaded: loaded (/run/systemd/transient/session-136.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:30:31 UTC; 50min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 3.8M)
        CPU: 93ms
     CGroup: /user.slice/user-1000.slice/session-136.scope
             ├─250434 "sshd-session: zuul [priv]"
             └─250449 "sshd-session: zuul@notty"

Jan 30 19:30:31 compute-1 systemd[1]: Started Session 136 of User zuul.
Jan 30 19:30:32 compute-1 sudo[250470]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.MZTpXwPILJ
Jan 30 19:30:32 compute-1 sudo[250470]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:30:32 compute-1 sudo[250470]: pam_unix(sudo:session): session closed for user root

● session-162.scope - Session 162 of User zuul
     Loaded: loaded (/run/systemd/transient/session-162.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 20:20:56 UTC; 27s ago
         IO: 102.6M read, 23.2M written
      Tasks: 15
     Memory: 696.1M (peak: 755.2M)
        CPU: 1min 8.758s
     CGroup: /user.slice/user-1000.slice/session-162.scope
             ├─267972 "sshd-session: zuul [priv]"
             ├─267975 "sshd-session: zuul@notty"
             ├─267976 sudo bash -c "rm -rf /var/tmp/sos-osp && mkdir /var/tmp/sos-osp && sos report --batch --all-logs --tmp-dir=/var/tmp/sos-osp  -p container,openstack_edpm,system,storage,virt"
             ├─268000 /usr/bin/python3 -s /sbin/sos report --batch --all-logs --tmp-dir=/var/tmp/sos-osp -p container,openstack_edpm,system,storage,virt
             ├─271208 timeout 15s turbostat --debug sleep 10
             ├─272225 timeout 300s systemctl status --all
             ├─272226 systemctl status --all
             ├─272295 timeout --foreground 300s virsh -r nodedev-dumpxml net_tapbdcc71de_2c_fe_16_3e_9a_66_89
             └─272296 virsh -r nodedev-dumpxml net_tapbdcc71de_2c_fe_16_3e_9a_66_89

Jan 30 20:20:56 compute-1 systemd[1]: Started Session 162 of User zuul.
Jan 30 20:20:56 compute-1 sudo[267976]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/bash -c 'rm -rf /var/tmp/sos-osp && mkdir /var/tmp/sos-osp && sos report --batch --all-logs --tmp-dir=/var/tmp/sos-osp  -p container,openstack_edpm,system,storage,virt'
Jan 30 20:20:56 compute-1 sudo[267976]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 20:21:14 compute-1 ovs-appctl[270189]: ovs|00001|daemon_unix|WARN|/var/run/openvswitch/ovs-monitor-ipsec.pid: open: No such file or directory

● session-48.scope - Session 48 of User zuul
     Loaded: loaded (/run/systemd/transient/session-48.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:16:52 UTC; 1h 4min ago
         IO: 1.4M read, 4.0K written
      Tasks: 2
     Memory: 2.5M (peak: 7.2M)
        CPU: 101ms
     CGroup: /user.slice/user-1000.slice/session-48.scope
             ├─242639 "sshd-session: zuul [priv]"
             └─242642 "sshd-session: zuul@notty"

Jan 30 19:16:52 compute-1 systemd[1]: Started Session 48 of User zuul.
Jan 30 19:16:52 compute-1 sudo[242672]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni eth1 icmp and ether host fa:16:3e:04:31:06 -w /tmp/tmp.Lr8zmPVhpv
Jan 30 19:16:52 compute-1 sudo[242672]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:18:53 compute-1 sudo[242672]: pam_unix(sudo:session): session closed for user root

● session-50.scope - Session 50 of User zuul
     Loaded: loaded (/run/systemd/transient/session-50.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:17:14 UTC; 1h 4min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 4.0M)
        CPU: 94ms
     CGroup: /user.slice/user-1000.slice/session-50.scope
             ├─242795 "sshd-session: zuul [priv]"
             └─242798 "sshd-session: zuul@notty"

Jan 30 19:17:14 compute-1 systemd[1]: Started Session 50 of User zuul.
Jan 30 19:17:14 compute-1 sudo[242799]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.Lr8zmPVhpv
Jan 30 19:17:14 compute-1 sudo[242799]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:17:14 compute-1 sudo[242799]: pam_unix(sudo:session): session closed for user root

● session-51.scope - Session 51 of User zuul
     Loaded: loaded (/run/systemd/transient/session-51.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:17:22 UTC; 1h 4min ago
         IO: 0B read, 4.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.7M)
        CPU: 97ms
     CGroup: /user.slice/user-1000.slice/session-51.scope
             ├─242870 "sshd-session: zuul [priv]"
             └─242873 "sshd-session: zuul@notty"

Jan 30 19:17:22 compute-1 systemd[1]: Started Session 51 of User zuul.
Jan 30 19:17:23 compute-1 sudo[242903]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni eth1 icmp and ether host fa:16:3e:0f:37:cf -w /tmp/tmp.Lx5PefsY37
Jan 30 19:17:23 compute-1 sudo[242903]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:19:23 compute-1 sudo[242903]: pam_unix(sudo:session): session closed for user root

● session-53.scope - Session 53 of User zuul
     Loaded: loaded (/run/systemd/transient/session-53.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:17:32 UTC; 1h 3min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 3.9M)
        CPU: 79ms
     CGroup: /user.slice/user-1000.slice/session-53.scope
             ├─242971 "sshd-session: zuul [priv]"
             └─242974 "sshd-session: zuul@notty"

Jan 30 19:17:32 compute-1 systemd[1]: Started Session 53 of User zuul.
Jan 30 19:17:32 compute-1 sudo[242975]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.Lx5PefsY37
Jan 30 19:17:32 compute-1 sudo[242975]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:17:32 compute-1 sudo[242975]: pam_unix(sudo:session): session closed for user root

● session-54.scope - Session 54 of User zuul
     Loaded: loaded (/run/systemd/transient/session-54.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:17:40 UTC; 1h 3min ago
         IO: 0B read, 4.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.7M)
        CPU: 78ms
     CGroup: /user.slice/user-1000.slice/session-54.scope
             ├─243049 "sshd-session: zuul [priv]"
             └─243052 "sshd-session: zuul@notty"

Jan 30 19:17:40 compute-1 systemd[1]: Started Session 54 of User zuul.
Jan 30 19:17:40 compute-1 sudo[243082]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni eth1 icmp and ether host fa:16:3e:04:31:06 -w /tmp/tmp.xf9DKOT2FJ
Jan 30 19:17:40 compute-1 sudo[243082]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:19:40 compute-1 sudo[243082]: pam_unix(sudo:session): session closed for user root

● session-56.scope - Session 56 of User zuul
     Loaded: loaded (/run/systemd/transient/session-56.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:17:49 UTC; 1h 3min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 3.9M)
        CPU: 51ms
     CGroup: /user.slice/user-1000.slice/session-56.scope
             ├─243121 "sshd-session: zuul [priv]"
             └─243131 "sshd-session: zuul@notty"

Jan 30 19:17:49 compute-1 systemd[1]: Started Session 56 of User zuul.
Jan 30 19:17:49 compute-1 sudo[243162]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.xf9DKOT2FJ
Jan 30 19:17:49 compute-1 sudo[243162]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:17:49 compute-1 sudo[243162]: pam_unix(sudo:session): session closed for user root

● session-57.scope - Session 57 of User zuul
     Loaded: loaded (/run/systemd/transient/session-57.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:17:56 UTC; 1h 3min ago
         IO: 0B read, 8.0K written
      Tasks: 2
     Memory: 1.0M (peak: 6.1M)
        CPU: 87ms
     CGroup: /user.slice/user-1000.slice/session-57.scope
             ├─243214 "sshd-session: zuul [priv]"
             └─243217 "sshd-session: zuul@notty"

Jan 30 19:17:56 compute-1 systemd[1]: Started Session 57 of User zuul.
Jan 30 19:17:57 compute-1 sudo[243247]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni eth1 icmp and ether host fa:16:3e:d1:5a:46 -w /tmp/tmp.nF8noGoTVu
Jan 30 19:17:57 compute-1 sudo[243247]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:19:57 compute-1 sudo[243247]: pam_unix(sudo:session): session closed for user root

● session-59.scope - Session 59 of User zuul
     Loaded: loaded (/run/systemd/transient/session-59.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:18:05 UTC; 1h 3min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 4.2M)
        CPU: 80ms
     CGroup: /user.slice/user-1000.slice/session-59.scope
             ├─243317 "sshd-session: zuul [priv]"
             └─243320 "sshd-session: zuul@notty"

Jan 30 19:18:05 compute-1 systemd[1]: Started Session 59 of User zuul.
Jan 30 19:18:06 compute-1 sudo[243321]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.nF8noGoTVu
Jan 30 19:18:06 compute-1 sudo[243321]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:18:06 compute-1 sudo[243321]: pam_unix(sudo:session): session closed for user root

● session-60.scope - Session 60 of User zuul
     Loaded: loaded (/run/systemd/transient/session-60.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:18:14 UTC; 1h 3min ago
         IO: 0B read, 8.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.8M)
        CPU: 88ms
     CGroup: /user.slice/user-1000.slice/session-60.scope
             ├─243597 "sshd-session: zuul [priv]"
             └─243600 "sshd-session: zuul@notty"

Jan 30 19:18:14 compute-1 systemd[1]: Started Session 60 of User zuul.
Jan 30 19:18:14 compute-1 sudo[243630]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni eth1 icmp and ether host fa:16:3e:d1:5a:46 -w /tmp/tmp.k1sR16MZw1
Jan 30 19:18:14 compute-1 sudo[243630]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:20:14 compute-1 sudo[243630]: pam_unix(sudo:session): session closed for user root

● session-62.scope - Session 62 of User zuul
     Loaded: loaded (/run/systemd/transient/session-62.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:18:36 UTC; 1h 2min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 3.8M)
        CPU: 103ms
     CGroup: /user.slice/user-1000.slice/session-62.scope
             ├─243757 "sshd-session: zuul [priv]"
             └─243760 "sshd-session: zuul@notty"

Jan 30 19:18:36 compute-1 systemd[1]: Started Session 62 of User zuul.
Jan 30 19:18:36 compute-1 sudo[243761]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.k1sR16MZw1
Jan 30 19:18:36 compute-1 sudo[243761]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:18:36 compute-1 sudo[243761]: pam_unix(sudo:session): session closed for user root

● session-68.scope - Session 68 of User zuul
     Loaded: loaded (/run/systemd/transient/session-68.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:20:05 UTC; 1h 1min ago
         IO: 0B read, 8.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.7M)
        CPU: 98ms
     CGroup: /user.slice/user-1000.slice/session-68.scope
             ├─244541 "sshd-session: zuul [priv]"
             └─244560 "sshd-session: zuul@notty"

Jan 30 19:20:05 compute-1 systemd[1]: Started Session 68 of User zuul.
Jan 30 19:20:06 compute-1 sudo[244616]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni eth1 icmp and ether host fa:16:3e:bb:2f:ea -w /tmp/tmp.dd5itz6pXh
Jan 30 19:20:06 compute-1 sudo[244616]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:22:06 compute-1 sudo[244616]: pam_unix(sudo:session): session closed for user root

● session-70.scope - Session 70 of User zuul
     Loaded: loaded (/run/systemd/transient/session-70.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:20:14 UTC; 1h 1min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 4.4M)
        CPU: 94ms
     CGroup: /user.slice/user-1000.slice/session-70.scope
             ├─244705 "sshd-session: zuul [priv]"
             └─244708 "sshd-session: zuul@notty"

Jan 30 19:20:14 compute-1 systemd[1]: Started Session 70 of User zuul.
Jan 30 19:20:15 compute-1 sudo[244709]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.dd5itz6pXh
Jan 30 19:20:15 compute-1 sudo[244709]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:20:15 compute-1 sudo[244709]: pam_unix(sudo:session): session closed for user root

● session-71.scope - Session 71 of User zuul
     Loaded: loaded (/run/systemd/transient/session-71.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:20:16 UTC; 1h 1min ago
         IO: 0B read, 4.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.9M)
        CPU: 99ms
     CGroup: /user.slice/user-1000.slice/session-71.scope
             ├─244735 "sshd-session: zuul [priv]"
             └─244738 "sshd-session: zuul@notty"

Jan 30 19:20:16 compute-1 systemd[1]: Started Session 71 of User zuul.
Jan 30 19:20:16 compute-1 sudo[244768]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni eth1 icmp and ether host fa:16:3e:07:fa:b1 -w /tmp/tmp.iBGcDs08pk
Jan 30 19:20:16 compute-1 sudo[244768]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:22:16 compute-1 sudo[244768]: pam_unix(sudo:session): session closed for user root

● session-73.scope - Session 73 of User zuul
     Loaded: loaded (/run/systemd/transient/session-73.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:20:25 UTC; 1h 0min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 4.0M)
        CPU: 86ms
     CGroup: /user.slice/user-1000.slice/session-73.scope
             ├─244839 "sshd-session: zuul [priv]"
             └─244842 "sshd-session: zuul@notty"

Jan 30 19:20:25 compute-1 systemd[1]: Started Session 73 of User zuul.
Jan 30 19:20:25 compute-1 sudo[244843]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.iBGcDs08pk
Jan 30 19:20:25 compute-1 sudo[244843]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:20:25 compute-1 sudo[244843]: pam_unix(sudo:session): session closed for user root

● session-74.scope - Session 74 of User zuul
     Loaded: loaded (/run/systemd/transient/session-74.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:20:26 UTC; 1h 0min ago
         IO: 0B read, 8.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.8M)
        CPU: 99ms
     CGroup: /user.slice/user-1000.slice/session-74.scope
             ├─244869 "sshd-session: zuul [priv]"
             └─244872 "sshd-session: zuul@notty"

Jan 30 19:20:26 compute-1 systemd[1]: Started Session 74 of User zuul.
Jan 30 19:20:26 compute-1 sudo[244902]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni genev_sys_6081 icmp and ether host fa:16:3e:ea:8e:55 and ether host fa:16:3e:5e:32:2f -w /tmp/tmp.Fowei5Waeb
Jan 30 19:20:26 compute-1 sudo[244902]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:22:26 compute-1 sudo[244902]: pam_unix(sudo:session): session closed for user root

● session-76.scope - Session 76 of User zuul
     Loaded: loaded (/run/systemd/transient/session-76.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:20:35 UTC; 1h 0min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 4.0M)
        CPU: 83ms
     CGroup: /user.slice/user-1000.slice/session-76.scope
             ├─244930 "sshd-session: zuul [priv]"
             └─244933 "sshd-session: zuul@notty"

Jan 30 19:20:35 compute-1 systemd[1]: Started Session 76 of User zuul.
Jan 30 19:20:35 compute-1 sudo[244934]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.Fowei5Waeb
Jan 30 19:20:35 compute-1 sudo[244934]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:20:35 compute-1 sudo[244934]: pam_unix(sudo:session): session closed for user root

● session-79.scope - Session 79 of User zuul
     Loaded: loaded (/run/systemd/transient/session-79.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:21:17 UTC; 1h 0min ago
         IO: 0B read, 4.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.8M)
        CPU: 107ms
     CGroup: /user.slice/user-1000.slice/session-79.scope
             ├─245397 "sshd-session: zuul [priv]"
             └─245400 "sshd-session: zuul@notty"

Jan 30 19:21:17 compute-1 systemd[1]: Started Session 79 of User zuul.
Jan 30 19:21:17 compute-1 sudo[245430]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni eth1 icmp and ether host fa:16:3e:bb:2f:ea -w /tmp/tmp.MCl2QSd8qr
Jan 30 19:21:17 compute-1 sudo[245430]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:23:18 compute-1 sudo[245430]: pam_unix(sudo:session): session closed for user root

● session-81.scope - Session 81 of User zuul
     Loaded: loaded (/run/systemd/transient/session-81.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:21:26 UTC; 59min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 4.0M)
        CPU: 90ms
     CGroup: /user.slice/user-1000.slice/session-81.scope
             ├─245500 "sshd-session: zuul [priv]"
             └─245503 "sshd-session: zuul@notty"

Jan 30 19:21:26 compute-1 systemd[1]: Started Session 81 of User zuul.
Jan 30 19:21:26 compute-1 sudo[245504]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.MCl2QSd8qr
Jan 30 19:21:26 compute-1 sudo[245504]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:21:26 compute-1 sudo[245504]: pam_unix(sudo:session): session closed for user root

● session-82.scope - Session 82 of User zuul
     Loaded: loaded (/run/systemd/transient/session-82.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:21:27 UTC; 59min ago
         IO: 0B read, 4.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.8M)
        CPU: 98ms
     CGroup: /user.slice/user-1000.slice/session-82.scope
             ├─245530 "sshd-session: zuul [priv]"
             └─245533 "sshd-session: zuul@notty"

Jan 30 19:21:27 compute-1 systemd[1]: Started Session 82 of User zuul.
Jan 30 19:21:28 compute-1 sudo[245563]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni eth1 icmp and ether host fa:16:3e:07:fa:b1 -w /tmp/tmp.QwbpSTJPY4
Jan 30 19:21:28 compute-1 sudo[245563]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:23:28 compute-1 sudo[245563]: pam_unix(sudo:session): session closed for user root

● session-84.scope - Session 84 of User zuul
     Loaded: loaded (/run/systemd/transient/session-84.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:21:37 UTC; 59min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 4.0M)
        CPU: 84ms
     CGroup: /user.slice/user-1000.slice/session-84.scope
             ├─245589 "sshd-session: zuul [priv]"
             └─245592 "sshd-session: zuul@notty"

Jan 30 19:21:37 compute-1 systemd[1]: Started Session 84 of User zuul.
Jan 30 19:21:37 compute-1 sudo[245593]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.QwbpSTJPY4
Jan 30 19:21:37 compute-1 sudo[245593]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:21:37 compute-1 sudo[245593]: pam_unix(sudo:session): session closed for user root

● session-85.scope - Session 85 of User zuul
     Loaded: loaded (/run/systemd/transient/session-85.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:21:38 UTC; 59min ago
         IO: 0B read, 4.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.8M)
        CPU: 99ms
     CGroup: /user.slice/user-1000.slice/session-85.scope
             ├─245619 "sshd-session: zuul [priv]"
             └─245634 "sshd-session: zuul@notty"

Jan 30 19:21:38 compute-1 systemd[1]: Started Session 85 of User zuul.
Jan 30 19:21:38 compute-1 sudo[245693]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni genev_sys_6081 icmp and ether host fa:16:3e:ea:8e:55 and ether host fa:16:3e:5e:32:2f -w /tmp/tmp.39ktBmSUgQ
Jan 30 19:21:38 compute-1 sudo[245693]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:23:38 compute-1 sudo[245693]: pam_unix(sudo:session): session closed for user root

● session-87.scope - Session 87 of User zuul
     Loaded: loaded (/run/systemd/transient/session-87.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:21:47 UTC; 59min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 4.1M)
        CPU: 88ms
     CGroup: /user.slice/user-1000.slice/session-87.scope
             ├─245770 "sshd-session: zuul [priv]"
             └─245773 "sshd-session: zuul@notty"

Jan 30 19:21:47 compute-1 systemd[1]: Started Session 87 of User zuul.
Jan 30 19:21:47 compute-1 sudo[245774]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.39ktBmSUgQ
Jan 30 19:21:47 compute-1 sudo[245774]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:21:47 compute-1 sudo[245774]: pam_unix(sudo:session): session closed for user root

● session-94.scope - Session 94 of User zuul
     Loaded: loaded (/run/systemd/transient/session-94.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:23:01 UTC; 58min ago
         IO: 0B read, 8.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.9M)
        CPU: 80ms
     CGroup: /user.slice/user-1000.slice/session-94.scope
             ├─246551 "sshd-session: zuul [priv]"
             └─246554 "sshd-session: zuul@notty"

Jan 30 19:23:01 compute-1 systemd[1]: Started Session 94 of User zuul.
Jan 30 19:23:02 compute-1 sudo[246584]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni eth1 icmp and ether host fa:16:3e:4e:e9:19 -w /tmp/tmp.d7c2KsqN7r
Jan 30 19:23:02 compute-1 sudo[246584]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:25:02 compute-1 sudo[246584]: pam_unix(sudo:session): session closed for user root

● session-96.scope - Session 96 of User zuul
     Loaded: loaded (/run/systemd/transient/session-96.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:23:23 UTC; 58min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 3.7M)
        CPU: 93ms
     CGroup: /user.slice/user-1000.slice/session-96.scope
             ├─246719 "sshd-session: zuul [priv]"
             └─246722 "sshd-session: zuul@notty"

Jan 30 19:23:23 compute-1 systemd[1]: Started Session 96 of User zuul.
Jan 30 19:23:23 compute-1 sudo[246723]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.d7c2KsqN7r
Jan 30 19:23:23 compute-1 sudo[246723]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:23:23 compute-1 sudo[246723]: pam_unix(sudo:session): session closed for user root

● session-97.scope - Session 97 of User zuul
     Loaded: loaded (/run/systemd/transient/session-97.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:23:42 UTC; 57min ago
         IO: 0B read, 8.0K written
      Tasks: 2
     Memory: 1.0M (peak: 5.7M)
        CPU: 102ms
     CGroup: /user.slice/user-1000.slice/session-97.scope
             ├─246796 "sshd-session: zuul [priv]"
             └─246813 "sshd-session: zuul@notty"

Jan 30 19:23:42 compute-1 systemd[1]: Started Session 97 of User zuul.
Jan 30 19:23:42 compute-1 sudo[246873]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 tcpdump -s0 -Uni eth1 icmp and ether host fa:16:3e:a8:68:85 -w /tmp/tmp.VS3rExsK6g
Jan 30 19:23:42 compute-1 sudo[246873]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:25:42 compute-1 sudo[246873]: pam_unix(sudo:session): session closed for user root

● session-99.scope - Session 99 of User zuul
     Loaded: loaded (/run/systemd/transient/session-99.scope; transient)
  Transient: yes
     Active: active (running) since Fri 2026-01-30 19:24:03 UTC; 57min ago
         IO: 0B read, 0B written
      Tasks: 2
     Memory: 1.0M (peak: 3.8M)
        CPU: 88ms
     CGroup: /user.slice/user-1000.slice/session-99.scope
             ├─247006 "sshd-session: zuul [priv]"
             └─247009 "sshd-session: zuul@notty"

Jan 30 19:24:03 compute-1 systemd[1]: Started Session 99 of User zuul.
Jan 30 19:24:03 compute-1 sudo[247010]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/timeout 120 cat /tmp/tmp.VS3rExsK6g
Jan 30 19:24:03 compute-1 sudo[247010]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 19:24:03 compute-1 sudo[247010]: pam_unix(sudo:session): session closed for user root

○ 407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a-41b4adf9ee4505ef.service - /usr/bin/podman healthcheck run 407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a
     Loaded: loaded (/run/systemd/transient/407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a-41b4adf9ee4505ef.service; transient)
  Transient: yes
     Active: inactive (dead) since Fri 2026-01-30 20:20:56 UTC; 28s ago
   Duration: 110ms
TriggeredBy: ● 407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a-41b4adf9ee4505ef.timer
    Process: 267906 ExecStart=/usr/bin/podman healthcheck run 407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a (code=exited, status=0/SUCCESS)
   Main PID: 267906 (code=exited, status=0/SUCCESS)
        CPU: 99ms

Jan 30 20:20:56 compute-1 podman[267906]: 2026-01-30 20:20:56.586473632 +0000 UTC m=+0.094713714 container health_status 407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_id=node_exporter, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'EDPM_CONFIG_HASH': 'f3391df1932a31a4475305bf7ba459820c071ee6bc26dffcd59f1c41f8b252f0-1906ca889f747a7874c38f7dde1e751324374c17012dee147154da58b6efddd8'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/node_exporter', 'test': '/openstack/healthcheck node_exporter'}, 'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'net': 'host', 'ports': ['9100:9100'], 'privileged': True, 'recreate': True, 'restart': 'always', 'user': 'root', 'volumes': ['/var/lib/openstack/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']})

○ 4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd-592662485594b6f2.service - /usr/bin/podman healthcheck run 4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd
     Loaded: loaded (/run/systemd/transient/4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd-592662485594b6f2.service; transient)
  Transient: yes
     Active: inactive (dead) since Fri 2026-01-30 20:21:15 UTC; 9s ago
   Duration: 94ms
TriggeredBy: ● 4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd-592662485594b6f2.timer
    Process: 270534 ExecStart=/usr/bin/podman healthcheck run 4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd (code=exited, status=0/SUCCESS)
   Main PID: 270534 (code=exited, status=0/SUCCESS)
        CPU: 87ms

Jan 30 20:21:15 compute-1 podman[270534]: 2026-01-30 20:21:15.569632636 +0000 UTC m=+0.076115446 container health_status 4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd (image=quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified, name=ceilometer_agent_compute, health_status=healthy, health_failing_streak=0, health_log=, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, config_data={'command': 'kolla_start', 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'OS_ENDPOINT_TYPE': 'internal', 'EDPM_CONFIG_HASH': 'fb0ba4ae2b01ec1faae83a80acf21c3ad3948d5c7b1e5820f87360e814103f4d-f3391df1932a31a4475305bf7ba459820c071ee6bc26dffcd59f1c41f8b252f0-1906ca889f747a7874c38f7dde1e751324374c17012dee147154da58b6efddd8-1906ca889f747a7874c38f7dde1e751324374c17012dee147154da58b6efddd8'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ceilometer_agent_compute', 'test': '/openstack/healthcheck compute'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified', 'net': 'host', 'restart': 'always', 'security_opt': 'label:type:ceilometer_polling_t', 'user': 'ceilometer', 'volumes': ['/var/lib/openstack/telemetry:/var/lib/kolla/config_files/src:z', '/var/lib/kolla/config_files/ceilometer_agent_compute.json:/var/lib/kolla/config_files/config.json:z', '/run/libvirt:/run/libvirt:shared,ro', '/etc/hosts:/etc/hosts:ro', '/etc/pki/tls/certs/ca-bundle.trust.crt:/etc/pki/tls/certs/ca-bundle.trust.crt:ro', '/etc/localtime:/etc/localtime:ro', '/etc/pki/ca-trust/source/anchors:/etc/pki/ca-trust/source/anchors:ro', '/var/lib/openstack/cacerts/telemetry/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/telemetry/ceilometer_prom_exporter.yaml:/etc/ceilometer/ceilometer_prom_exporter.yaml:z', 
'/var/lib/openstack/certs/telemetry/default:/etc/ceilometer/tls:z', '/dev/log:/dev/log', '/var/lib/openstack/healthchecks/ceilometer_agent_compute:/openstack:ro,z']}, tcib_build_tag=b85d0548925081ae8c6bdd697658cec4, tcib_managed=true, config_id=ceilometer_agent_compute, container_name=ceilometer_agent_compute, org.label-schema.build-date=20260127, org.label-schema.schema-version=1.0)

○ 834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20-309012fbd99c6d44.service - /usr/bin/podman healthcheck run 834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20
     Loaded: loaded (/run/systemd/transient/834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20-309012fbd99c6d44.service; transient)
  Transient: yes
     Active: inactive (dead) since Fri 2026-01-30 20:20:56 UTC; 28s ago
   Duration: 90ms
TriggeredBy: ● 834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20-309012fbd99c6d44.timer
    Process: 267907 ExecStart=/usr/bin/podman healthcheck run 834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20 (code=exited, status=0/SUCCESS)
   Main PID: 267907 (code=exited, status=0/SUCCESS)
        CPU: 94ms

Jan 30 20:20:56 compute-1 podman[267907]: 2026-01-30 20:20:56.572921178 +0000 UTC m=+0.073373599 container health_status 834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20 (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=ovn_metadata_agent, health_status=healthy, health_failing_streak=0, health_log=, config_id=ovn_metadata_agent, org.label-schema.schema-version=1.0, maintainer=OpenStack Kubernetes Operator team, managed_by=edpm_ansible, org.label-schema.build-date=20260127, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_build_tag=b85d0548925081ae8c6bdd697658cec4, container_name=ovn_metadata_agent, io.buildah.version=1.41.3, org.label-schema.vendor=CentOS, tcib_managed=true, config_data={'cgroupns': 'host', 'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': 'fb0ba4ae2b01ec1faae83a80acf21c3ad3948d5c7b1e5820f87360e814103f4d-f3391df1932a31a4475305bf7ba459820c071ee6bc26dffcd59f1c41f8b252f0-f3391df1932a31a4475305bf7ba459820c071ee6bc26dffcd59f1c41f8b252f0-f3391df1932a31a4475305bf7ba459820c071ee6bc26dffcd59f1c41f8b252f0-0823bd3e096c75f72e4a95820d41b0d4b6a1172bd2892ddb9f29b788a11bc87d'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_metadata_agent', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified', 'net': 'host', 'pid': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/run/openvswitch:/run/openvswitch:z', '/var/lib/openstack/neutron-ovn-metadata-agent:/etc/neutron.conf.d:z', '/run/netns:/run/netns:shared', '/var/lib/kolla/config_files/ovn_metadata_agent.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/neutron:/var/lib/neutron:shared,z', '/var/lib/neutron/ovn_metadata_haproxy_wrapper:/usr/local/bin/haproxy:ro', 
'/var/lib/neutron/kill_scripts:/etc/neutron/kill_scripts:ro', '/var/lib/openstack/cacerts/neutron-metadata/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/neutron-metadata/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_metadata_agent:/openstack:ro,z']})

Unit apparmor.service could not be found.
Unit apt-daily.service could not be found.
○ 883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037-70e2637eddb374c4.service - /usr/bin/podman healthcheck run 883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037
     Loaded: loaded (/run/systemd/transient/883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037-70e2637eddb374c4.service; transient)
  Transient: yes
     Active: inactive (dead) since Fri 2026-01-30 20:20:56 UTC; 28s ago
   Duration: 136ms
TriggeredBy: ● 883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037-70e2637eddb374c4.timer
    Process: 267908 ExecStart=/usr/bin/podman healthcheck run 883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037 (code=exited, status=0/SUCCESS)
   Main PID: 267908 (code=exited, status=0/SUCCESS)
        CPU: 105ms

Jan 30 20:20:56 compute-1 podman[267908]: 2026-01-30 20:20:56.621322109 +0000 UTC m=+0.118330682 container health_status 883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037 (image=quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified, name=ovn_controller, health_status=healthy, health_failing_streak=0, health_log=, container_name=ovn_controller, org.label-schema.build-date=20260127, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, config_id=ovn_controller, tcib_managed=true, io.buildah.version=1.41.3, managed_by=edpm_ansible, tcib_build_tag=b85d0548925081ae8c6bdd697658cec4, config_data={'depends_on': ['openvswitch.service'], 'environment': {'KOLLA_CONFIG_STRATEGY': 'COPY_ALWAYS', 'EDPM_CONFIG_HASH': 'fb0ba4ae2b01ec1faae83a80acf21c3ad3948d5c7b1e5820f87360e814103f4d-f3391df1932a31a4475305bf7ba459820c071ee6bc26dffcd59f1c41f8b252f0-f3391df1932a31a4475305bf7ba459820c071ee6bc26dffcd59f1c41f8b252f0-f3391df1932a31a4475305bf7ba459820c071ee6bc26dffcd59f1c41f8b252f0'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/ovn_controller', 'test': '/openstack/healthcheck'}, 'image': 'quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified', 'net': 'host', 'privileged': True, 'restart': 'always', 'user': 'root', 'volumes': ['/lib/modules:/lib/modules:ro', '/run:/run', '/var/lib/openvswitch/ovn:/run/ovn:shared,z', '/var/lib/kolla/config_files/ovn_controller.json:/var/lib/kolla/config_files/config.json:ro', '/var/lib/openstack/cacerts/ovn/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro,z', '/var/lib/openstack/certs/ovn/default/ca.crt:/etc/pki/tls/certs/ovndbca.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.crt:/etc/pki/tls/certs/ovndb.crt:ro,z', '/var/lib/openstack/certs/ovn/default/tls.key:/etc/pki/tls/private/ovndb.key:ro,Z', '/var/lib/openstack/healthchecks/ovn_controller:/openstack:ro,z']}, maintainer=OpenStack 
Kubernetes Operator team, org.label-schema.license=GPLv2)

● auditd.service - Security Auditing Service
     Loaded: loaded (/usr/lib/systemd/system/auditd.service; enabled; preset: enabled)
     Active: active (running) since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
       Docs: man:auditd(8)
             https://github.com/linux-audit/audit-documentation
   Main PID: 704 (auditd)
         IO: 0B read, 29.6M written
      Tasks: 4 (limit: 100092)
     Memory: 17.5M (peak: 18.0M)
        CPU: 5.268s
     CGroup: /system.slice/auditd.service
             ├─704 /sbin/auditd
             └─706 /usr/sbin/sedispatch

Jan 30 16:48:35 localhost augenrules[724]: failure 1
Jan 30 16:48:35 localhost augenrules[724]: pid 704
Jan 30 16:48:35 localhost augenrules[724]: rate_limit 0
Jan 30 16:48:35 localhost augenrules[724]: backlog_limit 8192
Jan 30 16:48:35 localhost augenrules[724]: lost 0
Jan 30 16:48:35 localhost augenrules[724]: backlog 4
Jan 30 16:48:35 localhost augenrules[724]: backlog_wait_time 60000
Jan 30 16:48:35 localhost augenrules[724]: backlog_wait_time_actual 0
Jan 30 16:48:35 localhost systemd[1]: Started Security Auditing Service.
Jan 30 17:43:21 compute-1 auditd[704]: Audit daemon rotating log files

○ auth-rpcgss-module.service - Kernel Module supporting RPCSEC_GSS
     Loaded: loaded (/usr/lib/systemd/system/auth-rpcgss-module.service; static)
     Active: inactive (dead)
Unit auto-cpufreq.service could not be found.
Unit autofs.service could not be found.
  Condition: start condition failed at Fri 2026-01-30 16:48:35 UTC; 3h 32min ago

○ bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98-7fd10ef3aba79581.service - /usr/bin/podman healthcheck run bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98
     Loaded: loaded (/run/systemd/transient/bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98-7fd10ef3aba79581.service; transient)
  Transient: yes
     Active: inactive (dead) since Fri 2026-01-30 20:21:15 UTC; 9s ago
   Duration: 75ms
TriggeredBy: ● bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98-7fd10ef3aba79581.timer
    Process: 270538 ExecStart=/usr/bin/podman healthcheck run bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98 (code=exited, status=0/SUCCESS)
   Main PID: 270538 (code=exited, status=0/SUCCESS)
        CPU: 75ms

Jan 30 20:21:15 compute-1 podman[270538]: 2026-01-30 20:21:15.554039915 +0000 UTC m=+0.063147020 container health_status bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98 (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, health_status=healthy, health_failing_streak=0, health_log=, config_data={'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'CONTAINER_HOST': 'unix:///run/podman/podman.sock', 'OS_ENDPOINT_TYPE': 'internal', 'EDPM_CONFIG_HASH': 'f3391df1932a31a4475305bf7ba459820c071ee6bc26dffcd59f1c41f8b252f0-1906ca889f747a7874c38f7dde1e751324374c17012dee147154da58b6efddd8'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/podman_exporter', 'test': '/openstack/healthcheck podman_exporter'}, 'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'net': 'host', 'ports': ['9882:9882'], 'privileged': True, 'recreate': True, 'restart': 'always', 'user': 'root', 'volumes': ['/var/lib/openstack/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=podman_exporter, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)

○ blk-availability.service - Availability of block devices
     Loaded: loaded (/usr/lib/systemd/system/blk-availability.service; disabled; preset: disabled)
     Active: inactive (dead)

● chronyd.service - NTP client/server
     Loaded: loaded (/usr/lib/systemd/system/chronyd.service; enabled; preset: enabled)
     Active: active (running) since Fri 2026-01-30 17:30:55 UTC; 2h 50min ago
       Docs: man:chronyd(8)
             man:chrony.conf(5)
   Main PID: 65304 (chronyd)
         IO: 0B read, 8.0K written
      Tasks: 1 (limit: 100092)
     Memory: 1.0M (peak: 2.2M)
        CPU: 63ms
     CGroup: /system.slice/chronyd.service
             └─65304 /usr/sbin/chronyd -F 2

Jan 30 17:30:55 compute-1 systemd[1]: Starting NTP client/server...
Jan 30 17:30:55 compute-1 chronyd[65304]: chronyd version 4.8 starting (+CMDMON +REFCLOCK +RTC +PRIVDROP +SCFILTER +SIGND +NTS +SECHASH +IPV6 +DEBUG)
Jan 30 17:30:55 compute-1 chronyd[65304]: Frequency -28.256 +/- 0.166 ppm read from /var/lib/chrony/drift
Jan 30 17:30:55 compute-1 chronyd[65304]: Loaded seccomp filter (level 2)
Jan 30 17:30:55 compute-1 systemd[1]: Started NTP client/server.
Jan 30 17:33:06 compute-1 chronyd[65304]: Selected source 198.161.203.36 (pool.ntp.org)

● cloud-config.service - Cloud-init: Config Stage
     Loaded: loaded (/usr/lib/systemd/system/cloud-config.service; enabled; preset: disabled)
     Active: active (exited) since Fri 2026-01-30 16:48:44 UTC; 3h 32min ago
   Main PID: 1003 (code=exited, status=0/SUCCESS)
        CPU: 438ms

Jan 30 16:48:43 np0005602931.novalocal systemd[1]: Starting Cloud-init: Config Stage...
Jan 30 16:48:44 np0005602931.novalocal cloud-init[1065]: Cloud-init v. 24.4-8.el9 running 'modules:config' at Fri, 30 Jan 2026 16:48:44 +0000. Up 13.59 seconds.
Jan 30 16:48:44 np0005602931.novalocal systemd[1]: Finished Cloud-init: Config Stage.

● cloud-final.service - Cloud-init: Final Stage
     Loaded: loaded (/usr/lib/systemd/system/cloud-final.service; enabled; preset: disabled)
     Active: active (exited) since Fri 2026-01-30 16:48:44 UTC; 3h 32min ago
   Main PID: 1103 (code=exited, status=0/SUCCESS)
         IO: 0B read, 0B written
      Tasks: 0
     Memory: 316.0K (peak: 34.4M)
        CPU: 493ms
     CGroup: /system.slice/cloud-final.service

Jan 30 16:48:44 np0005602931.novalocal systemd[1]: Starting Cloud-init: Final Stage...
Jan 30 16:48:44 np0005602931.novalocal cloud-init[1260]: Cloud-init v. 24.4-8.el9 running 'modules:final' at Fri, 30 Jan 2026 16:48:44 +0000. Up 14.04 seconds.
Jan 30 16:48:44 np0005602931.novalocal cloud-init[1288]: #############################################################
Jan 30 16:48:44 np0005602931.novalocal cloud-init[1289]: -----BEGIN SSH HOST KEY FINGERPRINTS-----
Jan 30 16:48:44 np0005602931.novalocal cloud-init[1295]: 256 SHA256:4NQDGJ+gq3va4AWCRjbEcbmiFli18xXRNjD6S7CLSCE root@np0005602931.novalocal (ECDSA)
Jan 30 16:48:44 np0005602931.novalocal cloud-init[1301]: 256 SHA256:Nz8Kfwlkx786yt0kl4c1Mc4GTG2VtyU5vl+bFV77JKc root@np0005602931.novalocal (ED25519)
Jan 30 16:48:44 np0005602931.novalocal cloud-init[1313]: #############################################################
Jan 30 16:48:44 np0005602931.novalocal cloud-init[1260]: Cloud-init v. 24.4-8.el9 finished at Fri, 30 Jan 2026 16:48:44 +0000. Datasource DataSourceConfigDrive [net,ver=2][source=/dev/sr0].  Up 14.27 seconds
Jan 30 16:48:44 np0005602931.novalocal systemd[1]: Finished Cloud-init: Final Stage.

● cloud-init-local.service - Cloud-init: Local Stage (pre-network)
     Loaded: loaded (/usr/lib/systemd/system/cloud-init-local.service; enabled; preset: disabled)
     Active: active (exited) since Fri 2026-01-30 16:48:37 UTC; 3h 32min ago
   Main PID: 779 (code=exited, status=0/SUCCESS)
        CPU: 730ms

Jan 30 16:48:36 localhost systemd[1]: Starting Cloud-init: Local Stage (pre-network)...
Jan 30 16:48:37 localhost cloud-init[840]: Cloud-init v. 24.4-8.el9 running 'init-local' at Fri, 30 Jan 2026 16:48:37 +0000. Up 6.73 seconds.
Jan 30 16:48:37 np0005602931.novalocal systemd[1]: Finished Cloud-init: Local Stage (pre-network).

● cloud-init.service - Cloud-init: Network Stage
     Loaded: loaded (/usr/lib/systemd/system/cloud-init.service; enabled; preset: disabled)
     Active: active (exited) since Fri 2026-01-30 16:48:43 UTC; 3h 32min ago
   Main PID: 891 (code=exited, status=0/SUCCESS)
        CPU: 1.043s

Jan 30 16:48:43 np0005602931.novalocal cloud-init[922]: |            o ++o|
Jan 30 16:48:43 np0005602931.novalocal cloud-init[922]: |           . +.=+|
Jan 30 16:48:43 np0005602931.novalocal cloud-init[922]: |          o o =o+|
Jan 30 16:48:43 np0005602931.novalocal cloud-init[922]: |        Soo. ..*=|
Jan 30 16:48:43 np0005602931.novalocal cloud-init[922]: |         ..o  =**|
Jan 30 16:48:43 np0005602931.novalocal cloud-init[922]: |        .  .+.=BB|
Jan 30 16:48:43 np0005602931.novalocal cloud-init[922]: |         + o+*E+o|
Jan 30 16:48:43 np0005602931.novalocal cloud-init[922]: |          =ooo.  |
Jan 30 16:48:43 np0005602931.novalocal cloud-init[922]: +----[SHA256]-----+
Jan 30 16:48:43 np0005602931.novalocal systemd[1]: Finished Cloud-init: Network Stage.

○ cpupower.service - Configure CPU power related settings
     Loaded: loaded (/usr/lib/systemd/system/cpupower.service; disabled; preset: disabled)
     Active: inactive (dead)

● crond.service - Command Scheduler
     Loaded: loaded (/usr/lib/systemd/system/crond.service; enabled; preset: enabled)
     Active: active (running) since Fri 2026-01-30 16:48:43 UTC; 3h 32min ago
   Main PID: 1010 (crond)
         IO: 36.0K read, 12.0K written
      Tasks: 1 (limit: 100092)
     Memory: 1.3M (peak: 4.9M)
        CPU: 301ms
     CGroup: /system.slice/crond.service
             └─1010 /usr/sbin/crond -n

Jan 30 17:55:01 compute-1 anacron[7753]: Job `cron.monthly' started
Jan 30 17:55:01 compute-1 anacron[7753]: Job `cron.monthly' terminated
Jan 30 17:55:01 compute-1 anacron[7753]: Normal exit (3 jobs run)
Unit display-manager.service could not be found.
Jan 30 18:01:01 compute-1 CROND[217229]: (root) CMD (run-parts /etc/cron.hourly)
Jan 30 18:01:01 compute-1 CROND[217228]: (root) CMDEND (run-parts /etc/cron.hourly)
Jan 30 19:01:01 compute-1 CROND[237788]: (root) CMD (run-parts /etc/cron.hourly)
Jan 30 19:01:01 compute-1 run-parts[237797]: (/etc/cron.hourly) finished 0anacron
Jan 30 19:01:01 compute-1 CROND[237787]: (root) CMDEND (run-parts /etc/cron.hourly)
Jan 30 20:01:01 compute-1 CROND[261350]: (root) CMD (run-parts /etc/cron.hourly)
Jan 30 20:01:01 compute-1 CROND[261349]: (root) CMDEND (run-parts /etc/cron.hourly)

● dbus-broker.service - D-Bus System Message Bus
     Loaded: loaded (/usr/lib/systemd/system/dbus-broker.service; enabled; preset: enabled)
     Active: active (running) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
TriggeredBy: ● dbus.socket
       Docs: man:dbus-broker-launch(1)
   Main PID: 765 (dbus-broker-lau)
         IO: 0B read, 0B written
      Tasks: 2 (limit: 100092)
     Memory: 2.9M (peak: 3.7M)
        CPU: 5.952s
     CGroup: /system.slice/dbus-broker.service
             ├─765 /usr/bin/dbus-broker-launch --scope system --audit
             └─776 dbus-broker --log 4 --controller 9 --machine-id bf0bc0bb03de29b24cba1cc9599cf5d0 --max-bytes 536870912 --max-fds 4096 --max-matches 131072 --audit

Jan 30 17:26:18 compute-1 dbus-broker-launch[765]: Noticed file-system modification, trigger reload.
Jan 30 17:29:02 compute-1 dbus-broker-launch[776]: avc:  op=load_policy lsm=selinux seqno=7 res=1
Jan 30 17:29:13 compute-1 dbus-broker-launch[776]: avc:  op=load_policy lsm=selinux seqno=8 res=1
Jan 30 17:33:15 compute-1 dbus-broker-launch[776]: avc:  op=load_policy lsm=selinux seqno=9 res=1
Jan 30 17:36:35 compute-1 dbus-broker-launch[776]: avc:  op=load_policy lsm=selinux seqno=10 res=1
Jan 30 17:36:39 compute-1 dbus-broker-launch[776]: avc:  op=load_policy lsm=selinux seqno=11 res=1
Jan 30 17:37:21 compute-1 dbus-broker-launch[765]: Noticed file-system modification, trigger reload.
Jan 30 17:37:21 compute-1 dbus-broker-launch[776]: avc:  op=load_policy lsm=selinux seqno=12 res=1
Jan 30 17:37:21 compute-1 dbus-broker-launch[765]: Noticed file-system modification, trigger reload.
Jan 30 17:39:23 compute-1 dbus-broker-launch[776]: avc:  op=load_policy lsm=selinux seqno=13 res=1

○ dm-event.service - Device-mapper event daemon
     Loaded: loaded (/usr/lib/systemd/system/dm-event.service; static)
     Active: inactive (dead)
TriggeredBy: ● dm-event.socket
       Docs: man:dmeventd(8)

○ dnf-makecache.service - dnf makecache
     Loaded: loaded (/usr/lib/systemd/system/dnf-makecache.service; static)
     Active: inactive (dead) since Fri 2026-01-30 20:08:01 UTC; 13min ago
TriggeredBy: ● dnf-makecache.timer
    Process: 263522 ExecStart=/usr/bin/dnf makecache --timer (code=exited, status=0/SUCCESS)
   Main PID: 263522 (code=exited, status=0/SUCCESS)
        CPU: 220ms

Jan 30 20:08:01 compute-1 systemd[1]: Starting dnf makecache...
Jan 30 20:08:01 compute-1 dnf[263522]: Metadata cache refreshed recently.
Jan 30 20:08:01 compute-1 systemd[1]: dnf-makecache.service: Deactivated successfully.
Jan 30 20:08:01 compute-1 systemd[1]: Finished dnf makecache.

○ dracut-cmdline.service - dracut cmdline hook
     Loaded: loaded (/usr/lib/systemd/system/dracut-cmdline.service; static)
     Active: inactive (dead) since Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
   Duration: 1.885s
       Docs: man:dracut-cmdline.service(8)
             man:dracut.bootup(7)
   Main PID: 328 (code=exited, status=0/SUCCESS)
        CPU: 100ms

Jan 30 16:48:32 localhost systemd[1]: Starting dracut cmdline hook...
Jan 30 16:48:32 localhost dracut-cmdline[328]: dracut-9 dracut-057-102.git20250818.el9
Jan 30 16:48:32 localhost dracut-cmdline[328]: Using kernel command line parameters:    BOOT_IMAGE=(hd0,msdos1)/boot/vmlinuz-5.14.0-665.el9.x86_64 root=UUID=822f14ea-6e7e-41df-b0d8-fbe282d9ded8 ro console=ttyS0,115200n8 no_timer_check net.ifnames=0 crashkernel=1G-2G:192M,2G-64G:256M,64G-:512M
Jan 30 16:48:32 localhost systemd[1]: Finished dracut cmdline hook.
Jan 30 16:48:34 localhost systemd[1]: dracut-cmdline.service: Deactivated successfully.
Jan 30 16:48:34 localhost systemd[1]: Stopped dracut cmdline hook.

○ dracut-initqueue.service - dracut initqueue hook
     Loaded: loaded (/usr/lib/systemd/system/dracut-initqueue.service; static)
     Active: inactive (dead) since Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
   Duration: 1.081s
       Docs: man:dracut-initqueue.service(8)
             man:dracut.bootup(7)
   Main PID: 504 (code=exited, status=0/SUCCESS)
        CPU: 25ms

Jan 30 16:48:32 localhost systemd[1]: Starting dracut initqueue hook...
Jan 30 16:48:33 localhost systemd[1]: Finished dracut initqueue hook.
Jan 30 16:48:34 localhost systemd[1]: dracut-initqueue.service: Deactivated successfully.
Jan 30 16:48:34 localhost systemd[1]: Stopped dracut initqueue hook.

○ dracut-mount.service - dracut mount hook
     Loaded: loaded (/usr/lib/systemd/system/dracut-mount.service; static)
     Active: inactive (dead) since Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
   Duration: 137ms
       Docs: man:dracut-mount.service(8)
             man:dracut.bootup(7)
   Main PID: 569 (code=exited, status=0/SUCCESS)
        CPU: 14ms

Jan 30 16:48:33 localhost systemd[1]: Starting dracut mount hook...
Jan 30 16:48:33 localhost systemd[1]: Finished dracut mount hook.
Jan 30 16:48:34 localhost systemd[1]: dracut-mount.service: Deactivated successfully.
Jan 30 16:48:34 localhost systemd[1]: Stopped dracut mount hook.

○ dracut-pre-mount.service - dracut pre-mount hook
     Loaded: loaded (/usr/lib/systemd/system/dracut-pre-mount.service; static)
     Active: inactive (dead) since Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
   Duration: 1.047s
       Docs: man:dracut-pre-mount.service(8)
             man:dracut.bootup(7)
   Main PID: 547 (code=exited, status=0/SUCCESS)
        CPU: 8ms

Jan 30 16:48:33 localhost systemd[1]: Starting dracut pre-mount hook...
Jan 30 16:48:33 localhost systemd[1]: Finished dracut pre-mount hook.
Jan 30 16:48:34 localhost systemd[1]: dracut-pre-mount.service: Deactivated successfully.
Jan 30 16:48:34 localhost systemd[1]: Stopped dracut pre-mount hook.

○ dracut-pre-pivot.service - dracut pre-pivot and cleanup hook
     Loaded: loaded (/usr/lib/systemd/system/dracut-pre-pivot.service; static)
     Active: inactive (dead) since Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
   Duration: 25ms
       Docs: man:dracut-pre-pivot.service(8)
             man:dracut.bootup(7)
   Main PID: 574 (code=exited, status=0/SUCCESS)
        CPU: 75ms

Jan 30 16:48:33 localhost systemd[1]: Starting dracut pre-pivot and cleanup hook...
Jan 30 16:48:34 localhost systemd[1]: Finished dracut pre-pivot and cleanup hook.
Jan 30 16:48:34 localhost systemd[1]: dracut-pre-pivot.service: Deactivated successfully.
Jan 30 16:48:34 localhost systemd[1]: Stopped dracut pre-pivot and cleanup hook.

○ dracut-pre-trigger.service - dracut pre-trigger hook
     Loaded: loaded (/usr/lib/systemd/system/dracut-pre-trigger.service; static)
     Active: inactive (dead) since Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
   Duration: 1.568s
       Docs: man:dracut-pre-trigger.service(8)
             man:dracut.bootup(7)
   Main PID: 468 (code=exited, status=0/SUCCESS)
        CPU: 14ms

Jan 30 16:48:32 localhost systemd[1]: Starting dracut pre-trigger hook...
Jan 30 16:48:32 localhost systemd[1]: Finished dracut pre-trigger hook.
Jan 30 16:48:34 localhost systemd[1]: dracut-pre-trigger.service: Deactivated successfully.
Jan 30 16:48:34 localhost systemd[1]: Stopped dracut pre-trigger hook.

○ dracut-pre-udev.service - dracut pre-udev hook
     Loaded: loaded (/usr/lib/systemd/system/dracut-pre-udev.service; static)
     Active: inactive (dead) since Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
   Duration: 1.659s
       Docs: man:dracut-pre-udev.service(8)
             man:dracut.bootup(7)
   Main PID: 417 (code=exited, status=0/SUCCESS)
        CPU: 219ms

Jan 30 16:48:32 localhost systemd[1]: Starting dracut pre-udev hook...
Jan 30 16:48:32 localhost rpc.statd[445]: Version 2.5.4 starting
Jan 30 16:48:32 localhost rpc.statd[445]: Initializing NSM state
Jan 30 16:48:32 localhost rpc.idmapd[450]: Setting log level to 0
Jan 30 16:48:32 localhost systemd[1]: Finished dracut pre-udev hook.
Jan 30 16:48:34 localhost rpc.idmapd[450]: exiting on signal 15
Jan 30 16:48:34 localhost systemd[1]: dracut-pre-udev.service: Deactivated successfully.
Jan 30 16:48:34 localhost systemd[1]: Stopped dracut pre-udev hook.

○ dracut-shutdown-onfailure.service - Service executing upon dracut-shutdown failure to perform cleanup
     Loaded: loaded (/usr/lib/systemd/system/dracut-shutdown-onfailure.service; static)
     Active: inactive (dead)
       Docs: man:dracut-shutdown.service(8)

● dracut-shutdown.service - Restore /run/initramfs on shutdown
     Loaded: loaded (/usr/lib/systemd/system/dracut-shutdown.service; static)
     Active: active (exited) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
       Docs: man:dracut-shutdown.service(8)
   Main PID: 782 (code=exited, status=0/SUCCESS)
        CPU: 2ms

Jan 30 16:48:36 localhost systemd[1]: Starting Restore /run/initramfs on shutdown...
Jan 30 16:48:36 localhost systemd[1]: Finished Restore /run/initramfs on shutdown.

○ e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a-7054953e52e79992.service - /usr/bin/podman healthcheck run e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a
     Loaded: loaded (/run/systemd/transient/e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a-7054953e52e79992.service; transient)
  Transient: yes
     Active: inactive (dead) since Fri 2026-01-30 20:21:05 UTC; 19s ago
   Duration: 104ms
TriggeredBy: ● e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a-7054953e52e79992.timer
    Process: 268675 ExecStart=/usr/bin/podman healthcheck run e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a (code=exited, status=0/SUCCESS)
   Main PID: 268675 (code=exited, status=0/SUCCESS)
        CPU: 103ms

Jan 30 20:21:05 compute-1 podman[268675]: 2026-01-30 20:21:05.720841733 +0000 UTC m=+0.083036232 container health_status e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, health_status=healthy, health_failing_streak=0, health_log=, vendor=Red Hat, Inc., version=9.7, distribution-scope=public, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., release=1769056855, architecture=x86_64, org.opencontainers.image.created=2026-01-22T05:09:47Z, config_data={'command': [], 'environment': {'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml', 'OS_ENDPOINT_TYPE': 'internal', 'EDPM_CONFIG_HASH': 'f3391df1932a31a4475305bf7ba459820c071ee6bc26dffcd59f1c41f8b252f0-1906ca889f747a7874c38f7dde1e751324374c17012dee147154da58b6efddd8'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter', 'test': '/openstack/healthcheck openstack-netwo'}, 'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'net': 'host', 'ports': ['9105:9105'], 'privileged': True, 'recreate': True, 'restart': 'always', 'volumes': ['/var/lib/openstack/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', '/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as 
a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., io.buildah.version=1.33.7, com.redhat.component=ubi9-minimal-container, com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.openshift.tags=minimal rhel9, url=https://catalog.redhat.com/en/search?searchType=containers, cpe=cpe:/a:redhat:enterprise_linux:9::appstream, managed_by=edpm_ansible, name=ubi9/ubi-minimal, container_name=openstack_network_exporter, org.opencontainers.image.revision=812a20485e9d8d728e95b468c2886da21352b9fc, vcs-type=git, build-date=2026-01-22T05:09:47Z, config_id=openstack_network_exporter, vcs-ref=812a20485e9d8d728e95b468c2886da21352b9fc, io.openshift.expose-services=, io.k8s.display-name=Red Hat Universal Base Image 9 Minimal, maintainer=Red Hat, Inc., summary=Provides the latest release of the minimal Red Hat Universal Base Image 9.)

● edpm-container-shutdown.service - EDPM Container Shutdown
     Loaded: loaded (/etc/systemd/system/edpm-container-shutdown.service; enabled; preset: enabled)
     Active: active (exited) since Fri 2026-01-30 17:31:13 UTC; 2h 50min ago
       Docs: https://github.com/openstack-k8s-operators/docs
   Main PID: 67797 (code=exited, status=0/SUCCESS)
        CPU: 2ms

Jan 30 17:31:13 compute-1 systemd[1]: Starting EDPM Container Shutdown...
Jan 30 17:31:13 compute-1 systemd[1]: Finished EDPM Container Shutdown.

● edpm_ceilometer_agent_compute.service - ceilometer_agent_compute container
     Loaded: loaded (/etc/systemd/system/edpm_ceilometer_agent_compute.service; enabled; preset: disabled)
     Active: active (running) since Fri 2026-01-30 17:43:22 UTC; 2h 38min ago
   Main PID: 193153 (conmon)
         IO: 0B read, 565.5K written
      Tasks: 1 (limit: 100092)
     Memory: 708.0K (peak: 18.6M)
        CPU: 1.051s
     CGroup: /system.slice/edpm_ceilometer_agent_compute.service
             └─193153 /usr/bin/conmon --api-version 1 -c 4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd -u 4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd/userdata -p /run/containers/storage/overlay-containers/4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd/userdata/pidfile -n ceilometer_agent_compute --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd/userdata/oci-log --conmon-pidfile /run/ceilometer_agent_compute.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup 
--exit-command-arg --stopped-only --exit-command-arg 4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd

Jan 30 20:21:23 compute-1 ceilometer_agent_compute[193153]: 2026-01-30 20:21:23.660 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Jan 30 20:21:23 compute-1 ceilometer_agent_compute[193153]: 2026-01-30 20:21:23.660 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Jan 30 20:21:23 compute-1 ceilometer_agent_compute[193153]: 2026-01-30 20:21:23.660 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Jan 30 20:21:23 compute-1 ceilometer_agent_compute[193153]: 2026-01-30 20:21:23.660 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Jan 30 20:21:23 compute-1 ceilometer_agent_compute[193153]: 2026-01-30 20:21:23.660 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Jan 30 20:21:23 compute-1 ceilometer_agent_compute[193153]: 2026-01-30 20:21:23.660 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Jan 30 20:21:23 compute-1 ceilometer_agent_compute[193153]: 2026-01-30 20:21:23.660 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Jan 30 20:21:23 compute-1 ceilometer_agent_compute[193153]: 2026-01-30 20:21:23.660 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Jan 30 20:21:23 compute-1 ceilometer_agent_compute[193153]: 2026-01-30 20:21:23.660 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Jan 30 20:21:23 compute-1 ceilometer_agent_compute[193153]: 2026-01-30 20:21:23.660 12 ERROR oslo_messaging.notify.messaging 

○ edpm_libvirt_guests.service - Suspend libvirt Guests in edpm
     Loaded: loaded (/etc/systemd/system/edpm_libvirt_guests.service; enabled; preset: disabled)
     Active: inactive (dead)
       Docs: man:libvirtd(8)
             https://libvirt.org

● edpm_node_exporter.service - node_exporter container
     Loaded: loaded (/etc/systemd/system/edpm_node_exporter.service; enabled; preset: disabled)
     Active: active (running) since Fri 2026-01-30 17:43:37 UTC; 2h 37min ago
   Main PID: 196151 (conmon)
         IO: 0B read, 129.0K written
      Tasks: 1 (limit: 100092)
     Memory: 692.0K (peak: 17.4M)
        CPU: 107ms
     CGroup: /system.slice/edpm_node_exporter.service
             └─196151 /usr/bin/conmon --api-version 1 -c 407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a -u 407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a/userdata -p /run/containers/storage/overlay-containers/407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a/userdata/pidfile -n node_exporter --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a/userdata/oci-log --conmon-pidfile /run/node_exporter.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 
--stopped-only --exit-command-arg 407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a

Jan 30 17:43:37 compute-1 node_exporter[196151]: ts=2026-01-30T17:43:37.016Z caller=node_exporter.go:117 level=info collector=tapestats
Jan 30 17:43:37 compute-1 node_exporter[196151]: ts=2026-01-30T17:43:37.016Z caller=node_exporter.go:117 level=info collector=udp_queues
Jan 30 17:43:37 compute-1 node_exporter[196151]: ts=2026-01-30T17:43:37.016Z caller=node_exporter.go:117 level=info collector=vmstat
Jan 30 17:43:37 compute-1 node_exporter[196151]: ts=2026-01-30T17:43:37.016Z caller=node_exporter.go:117 level=info collector=xfs
Jan 30 17:43:37 compute-1 node_exporter[196151]: ts=2026-01-30T17:43:37.016Z caller=node_exporter.go:117 level=info collector=zfs
Jan 30 17:43:37 compute-1 node_exporter[196151]: ts=2026-01-30T17:43:37.018Z caller=tls_config.go:232 level=info msg="Listening on" address=[::]:9100
Jan 30 17:43:37 compute-1 node_exporter[196151]: ts=2026-01-30T17:43:37.018Z caller=tls_config.go:268 level=info msg="TLS is enabled." http2=true address=[::]:9100
Jan 30 17:43:37 compute-1 podman[196136]: 2026-01-30 17:43:37.029538894 +0000 UTC m=+0.131728501 container start 407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a (image=quay.io/prometheus/node-exporter:v1.5.0, name=node_exporter, container_name=node_exporter, maintainer=The Prometheus Authors <prometheus-developers@googlegroups.com>, managed_by=edpm_ansible, config_data={'command': ['--web.config.file=/etc/node_exporter/node_exporter.yaml', '--web.disable-exporter-metrics', '--collector.systemd', '--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service', '--no-collector.dmi', '--no-collector.entropy', '--no-collector.thermal_zone', '--no-collector.time', '--no-collector.timex', '--no-collector.uname', '--no-collector.stat', '--no-collector.hwmon', '--no-collector.os', '--no-collector.selinux', '--no-collector.textfile', '--no-collector.powersupplyclass', '--no-collector.pressure', '--no-collector.rapl'], 'environment': {'OS_ENDPOINT_TYPE': 'internal', 'EDPM_CONFIG_HASH': 'f3391df1932a31a4475305bf7ba459820c071ee6bc26dffcd59f1c41f8b252f0-1906ca889f747a7874c38f7dde1e751324374c17012dee147154da58b6efddd8'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/node_exporter', 'test': '/openstack/healthcheck node_exporter'}, 'image': 'quay.io/prometheus/node-exporter:v1.5.0', 'net': 'host', 'ports': ['9100:9100'], 'privileged': True, 'recreate': True, 'restart': 'always', 'user': 'root', 'volumes': ['/var/lib/openstack/telemetry/node_exporter.yaml:/etc/node_exporter/node_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/node_exporter/tls:z', '/var/run/dbus/system_bus_socket:/var/run/dbus/system_bus_socket:rw', '/var/lib/openstack/healthchecks/node_exporter:/openstack:ro,z']}, config_id=node_exporter)
Jan 30 17:43:37 compute-1 podman[196136]: node_exporter
Jan 30 17:43:37 compute-1 systemd[1]: Started node_exporter container.

● edpm_nova_compute.service - nova_compute container
     Loaded: loaded (/etc/systemd/system/edpm_nova_compute.service; enabled; preset: disabled)
     Active: active (running) since Fri 2026-01-30 17:42:21 UTC; 2h 39min ago
   Main PID: 183471 (conmon)
         IO: 0B read, 88.5K written
      Tasks: 1 (limit: 100092)
     Memory: 696.0K (peak: 17.2M)
        CPU: 2.259s
     CGroup: /system.slice/edpm_nova_compute.service
             └─183471 /usr/bin/conmon --api-version 1 -c 455dca0f327e5b1da1cd1cb8c0cc19d961216e4814bf06622139b34f7df7859b -u 455dca0f327e5b1da1cd1cb8c0cc19d961216e4814bf06622139b34f7df7859b -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/455dca0f327e5b1da1cd1cb8c0cc19d961216e4814bf06622139b34f7df7859b/userdata -p /run/containers/storage/overlay-containers/455dca0f327e5b1da1cd1cb8c0cc19d961216e4814bf06622139b34f7df7859b/userdata/pidfile -n nova_compute --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/455dca0f327e5b1da1cd1cb8c0cc19d961216e4814bf06622139b34f7df7859b --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/455dca0f327e5b1da1cd1cb8c0cc19d961216e4814bf06622139b34f7df7859b/userdata/oci-log --conmon-pidfile /run/nova_compute.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 
--stopped-only --exit-command-arg 455dca0f327e5b1da1cd1cb8c0cc19d961216e4814bf06622139b34f7df7859b

Jan 30 20:21:17 compute-1 nova_compute[183471]: 2026-01-30 20:21:17.325 183475 DEBUG oslo_concurrency.lockutils [None req-e5da1f1e-88a6-46e3-9865-7e7c36fcd9e8 - - - - - -] Acquired lock "refresh_cache-79f0f99a-6f26-49ed-ac15-d149313db321" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:315[00m
Jan 30 20:21:17 compute-1 nova_compute[183471]: 2026-01-30 20:21:17.326 183475 DEBUG nova.network.neutron [None req-e5da1f1e-88a6-46e3-9865-7e7c36fcd9e8 - - - - - -] [instance: 79f0f99a-6f26-49ed-ac15-d149313db321] Forcefully refreshing network info cache for instance _get_instance_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:2004[00m
Jan 30 20:21:17 compute-1 nova_compute[183471]: 2026-01-30 20:21:17.326 183475 DEBUG nova.objects.instance [None req-e5da1f1e-88a6-46e3-9865-7e7c36fcd9e8 - - - - - -] Lazy-loading 'info_cache' on Instance uuid 79f0f99a-6f26-49ed-ac15-d149313db321 obj_load_attr /usr/lib/python3.9/site-packages/nova/objects/instance.py:1105[00m
Jan 30 20:21:17 compute-1 nova_compute[183471]: 2026-01-30 20:21:17.364 183475 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 30 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263[00m
Jan 30 20:21:19 compute-1 nova_compute[183471]: 2026-01-30 20:21:19.482 183475 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 30 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263[00m
Jan 30 20:21:21 compute-1 nova_compute[183471]: 2026-01-30 20:21:21.686 183475 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 30 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263[00m
Jan 30 20:21:22 compute-1 nova_compute[183471]: 2026-01-30 20:21:22.369 183475 DEBUG ovsdbapp.backend.ovs_idl.vlog [-] [POLLIN] on fd 30 __log_wakeup /usr/lib64/python3.9/site-packages/ovs/poller.py:263[00m
Jan 30 20:21:22 compute-1 nova_compute[183471]: 2026-01-30 20:21:22.989 183475 DEBUG nova.network.neutron [None req-e5da1f1e-88a6-46e3-9865-7e7c36fcd9e8 - - - - - -] [instance: 79f0f99a-6f26-49ed-ac15-d149313db321] Updating instance_info_cache with network_info: [{"id": "98614c39-5614-41c7-b019-d8b2cb213278", "address": "fa:16:3e:28:e9:1d", "network": {"id": "20be9363-8d31-4010-8379-2f4db75ec5ee", "bridge": "br-int", "label": "tempest-test-network--572011885", "subnets": [{"cidr": "192.168.0.0/24", "dns": [], "gateway": {"address": "192.168.0.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.0.226", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "192.168.122.239", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "c0aa7510026d41d3926e3156ef6ca6da", "mtu": 1342, "physical_network": null, "tunneled": true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tap98614c39-56", "ovs_interfaceid": "98614c39-5614-41c7-b019-d8b2cb213278", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}, {"id": "f2850036-f5fc-4aba-9f0d-2fa95e84e442", "address": "fa:16:3e:0b:98:69", "network": {"id": "311f9b45-49be-4345-9ac8-a1fe5a0b8a53", "bridge": "br-int", "label": "tempest-test-network--240217126", "subnets": [{"cidr": "192.168.1.0/24", "dns": [], "gateway": {"address": null, "type": "gateway", "version": null, "meta": {}}, "ips": [{"address": "192.168.1.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "c0aa7510026d41d3926e3156ef6ca6da", "mtu": 1342, "physical_network": null, "tunneled": 
true}}, "type": "ovs", "details": {"port_filter": true, "connectivity": "l2", "bridge_name": "br-int", "datapath_type": "system", "bound_drivers": {"0": "ovn"}}, "devname": "tapf2850036-f5", "ovs_interfaceid": "f2850036-f5fc-4aba-9f0d-2fa95e84e442", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] update_instance_cache_with_nw_info /usr/lib/python3.9/site-packages/nova/network/neutron.py:116[00m
Jan 30 20:21:23 compute-1 nova_compute[183471]: 2026-01-30 20:21:23.031 183475 DEBUG oslo_concurrency.lockutils [None req-e5da1f1e-88a6-46e3-9865-7e7c36fcd9e8 - - - - - -] Releasing lock "refresh_cache-79f0f99a-6f26-49ed-ac15-d149313db321" lock /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:333[00m
Jan 30 20:21:23 compute-1 nova_compute[183471]: 2026-01-30 20:21:23.032 183475 DEBUG nova.compute.manager [None req-e5da1f1e-88a6-46e3-9865-7e7c36fcd9e8 - - - - - -] [instance: 79f0f99a-6f26-49ed-ac15-d149313db321] Updated the network info_cache for instance _heal_instance_info_cache /usr/lib/python3.9/site-packages/nova/compute/manager.py:9929[00m

● edpm_openstack_network_exporter.service - openstack_network_exporter container
     Loaded: loaded (/etc/systemd/system/edpm_openstack_network_exporter.service; enabled; preset: disabled)
     Active: active (running) since Fri 2026-01-30 17:44:14 UTC; 2h 37min ago
   Main PID: 202408 (conmon)
         IO: 0B read, 126.0K written
      Tasks: 1 (limit: 100092)
     Memory: 688.0K (peak: 17.1M)
        CPU: 99ms
     CGroup: /system.slice/edpm_openstack_network_exporter.service
             └─202408 /usr/bin/conmon --api-version 1 -c e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a -u e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a/userdata -p /run/containers/storage/overlay-containers/e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a/userdata/pidfile -n openstack_network_exporter --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a/userdata/oci-log --conmon-pidfile /run/openstack_network_exporter.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup 
--exit-command-arg --stopped-only --exit-command-arg e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a

Jan 30 17:44:14 compute-1 openstack_network_exporter[202408]: INFO    17:44:14 main.go:55: *ovnnorthd.Collector not registered, metric set not enabled
Jan 30 17:44:14 compute-1 openstack_network_exporter[202408]: INFO    17:44:14 main.go:48: registering *ovn.Collector
Jan 30 17:44:14 compute-1 openstack_network_exporter[202408]: INFO    17:44:14 main.go:55: *ovsdbserver.Collector not registered, metric set not enabled
Jan 30 17:44:14 compute-1 openstack_network_exporter[202408]: INFO    17:44:14 main.go:48: registering *pmd_perf.Collector
Jan 30 17:44:14 compute-1 openstack_network_exporter[202408]: INFO    17:44:14 main.go:48: registering *pmd_rxq.Collector
Jan 30 17:44:14 compute-1 openstack_network_exporter[202408]: INFO    17:44:14 main.go:48: registering *vswitch.Collector
Jan 30 17:44:14 compute-1 openstack_network_exporter[202408]: NOTICE  17:44:14 main.go:76: listening on https://:9105/metrics
Jan 30 17:44:14 compute-1 podman[202393]: 2026-01-30 17:44:14.124566585 +0000 UTC m=+0.120289562 container start e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a (image=quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified, name=openstack_network_exporter, io.openshift.tags=minimal rhel9, url=https://catalog.redhat.com/en/search?searchType=containers, vcs-type=git, io.openshift.expose-services=, description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., name=ubi9/ubi-minimal, org.opencontainers.image.revision=812a20485e9d8d728e95b468c2886da21352b9fc, release=1769056855, build-date=2026-01-22T05:09:47Z, cpe=cpe:/a:redhat:enterprise_linux:9::appstream, summary=Provides the latest release of the minimal Red Hat Universal Base Image 9., version=9.7, config_data={'command': [], 'environment': {'OPENSTACK_NETWORK_EXPORTER_YAML': '/etc/openstack_network_exporter/openstack_network_exporter.yaml', 'OS_ENDPOINT_TYPE': 'internal', 'EDPM_CONFIG_HASH': 'f3391df1932a31a4475305bf7ba459820c071ee6bc26dffcd59f1c41f8b252f0-1906ca889f747a7874c38f7dde1e751324374c17012dee147154da58b6efddd8'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/openstack_network_exporter', 'test': '/openstack/healthcheck openstack-netwo'}, 'image': 'quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified', 'net': 'host', 'ports': ['9105:9105'], 'privileged': True, 'recreate': True, 'restart': 'always', 'volumes': ['/var/lib/openstack/telemetry/openstack_network_exporter.yaml:/etc/openstack_network_exporter/openstack_network_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/openstack_network_exporter/tls:z', '/var/run/openvswitch:/run/openvswitch:rw,z', 
'/var/lib/openvswitch/ovn:/run/ovn:rw,z', '/proc:/host/proc:ro', '/var/lib/openstack/healthchecks/openstack_network_exporter:/openstack:ro,z']}, io.buildah.version=1.33.7, managed_by=edpm_ansible, org.opencontainers.image.created=2026-01-22T05:09:47Z, vendor=Red Hat, Inc., com.redhat.license_terms=https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI, io.k8s.description=The Universal Base Image Minimal is a stripped down image that uses microdnf as a package manager. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly., architecture=x86_64, com.redhat.component=ubi9-minimal-container, distribution-scope=public, container_name=openstack_network_exporter, vcs-ref=812a20485e9d8d728e95b468c2886da21352b9fc, config_id=openstack_network_exporter, maintainer=Red Hat, Inc., io.k8s.display-name=Red Hat Universal Base Image 9 Minimal)
Jan 30 17:44:14 compute-1 podman[202393]: openstack_network_exporter
Jan 30 17:44:14 compute-1 systemd[1]: Started openstack_network_exporter container.

● edpm_ovn_controller.service - ovn_controller container
     Loaded: loaded (/etc/systemd/system/edpm_ovn_controller.service; enabled; preset: disabled)
     Active: active (running) since Fri 2026-01-30 19:32:56 UTC; 48min ago
    Process: 251535 ExecStart=/var/local/libexec/edpm-start-podman-container ovn_controller (code=exited, status=0/SUCCESS)
   Main PID: 251552 (conmon)
         IO: 0B read, 109.0K written
      Tasks: 1 (limit: 100092)
     Memory: 676.0K (peak: 18.2M)
        CPU: 341ms
     CGroup: /system.slice/edpm_ovn_controller.service
             └─251552 /usr/bin/conmon --api-version 1 -c 883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037 -u 883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037/userdata -p /run/containers/storage/overlay-containers/883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037/userdata/pidfile -n ovn_controller --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037 --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037/userdata/oci-log --conmon-pidfile /run/ovn_controller.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 
--stopped-only --exit-command-arg 883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037

Jan 30 20:20:08 compute-1 ovn_controller[251552]: 2026-01-30T20:20:08Z|00006|pinctrl(ovn_pinctrl0)|INFO|DHCPOFFER fa:16:3e:b1:22:7e 10.100.0.14
Jan 30 20:20:08 compute-1 ovn_controller[251552]: 2026-01-30T20:20:08Z|00007|pinctrl(ovn_pinctrl0)|INFO|DHCPACK fa:16:3e:b1:22:7e 10.100.0.14
Jan 30 20:20:21 compute-1 ovn_controller[251552]: 2026-01-30T20:20:21Z|00264|binding|INFO|Releasing lport e413b81f-0a7e-49fd-9502-f2fcdedb5966 from this chassis (sb_readonly=0)
Jan 30 20:20:21 compute-1 ovn_controller[251552]: 2026-01-30T20:20:21Z|00265|binding|INFO|Setting lport e413b81f-0a7e-49fd-9502-f2fcdedb5966 down in Southbound
Jan 30 20:20:21 compute-1 ovn_controller[251552]: 2026-01-30T20:20:21Z|00266|binding|INFO|Removing iface tape413b81f-0a ovn-installed in OVS
Jan 30 20:20:36 compute-1 ovn_controller[251552]: 2026-01-30T20:20:36Z|00267|binding|INFO|Releasing lport 4eddc8a5-3844-427f-b357-21fb6689422e from this chassis (sb_readonly=0)
Jan 30 20:20:36 compute-1 ovn_controller[251552]: 2026-01-30T20:20:36Z|00268|binding|INFO|Releasing lport 41c4aae8-ee2d-45e6-8bf6-7a252a2a2243 from this chassis (sb_readonly=0)
Jan 30 20:20:56 compute-1 ovn_controller[251552]: 2026-01-30T20:20:56Z|00269|pinctrl|WARN|Dropped 1735 log messages in last 60 seconds (most recently, 2 seconds ago) due to excessive rate
Jan 30 20:20:56 compute-1 ovn_controller[251552]: 2026-01-30T20:20:56Z|00270|pinctrl|WARN|IGMP Querier enabled without a valid IPv4 or IPv6 address
Jan 30 20:21:07 compute-1 ovn_controller[251552]: 2026-01-30T20:21:07Z|00271|memory_trim|INFO|Detected inactivity (last active 30012 ms ago): trimming memory

● edpm_ovn_metadata_agent.service - ovn_metadata_agent container
     Loaded: loaded (/etc/systemd/system/edpm_ovn_metadata_agent.service; enabled; preset: disabled)
     Active: active (running) since Fri 2026-01-30 17:35:03 UTC; 2h 46min ago
   Main PID: 105004 (conmon)
         IO: 0B read, 137.0K written
      Tasks: 1 (limit: 100092)
     Memory: 716.0K (peak: 18.2M)
        CPU: 534ms
     CGroup: /system.slice/edpm_ovn_metadata_agent.service
             └─105004 /usr/bin/conmon --api-version 1 -c 834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20 -u 834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20/userdata -p /run/containers/storage/overlay-containers/834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20/userdata/pidfile -n ovn_metadata_agent --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20 --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20/userdata/oci-log --conmon-pidfile /run/ovn_metadata_agent.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 
--stopped-only --exit-command-arg 834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20

Jan 30 20:20:21 compute-1 ovn_metadata_agent[105004]: 2026-01-30 20:20:21.741 212908 DEBUG oslo.privsep.daemon [-] privsep: reply[2cb071e3-340e-4696-8b3a-debc752805bc]: (4, True) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501[00m
Jan 30 20:20:21 compute-1 ovn_metadata_agent[105004]: 2026-01-30 20:20:21.761 212908 DEBUG oslo.privsep.daemon [-] privsep: reply[d7c8a0bf-7cd5-4f77-acc6-9c60a1befea3]: (4, [{'family': 0, '__align': (), 'ifi_type': 772, 'index': 1, 'flags': 65609, 'change': 0, 'attrs': [['IFLA_IFNAME', 'lo'], ['IFLA_TXQLEN', 1000], ['IFLA_OPERSTATE', 'UNKNOWN'], ['IFLA_LINKMODE', 0], ['IFLA_MTU', 65536], ['IFLA_MIN_MTU', 0], ['IFLA_MAX_MTU', 0], ['IFLA_GROUP', 0], ['IFLA_PROMISCUITY', 0], ['UNKNOWN', {'header': {'length': 8, 'type': 61}}], ['IFLA_NUM_TX_QUEUES', 1], ['IFLA_GSO_MAX_SEGS', 65535], ['IFLA_GSO_MAX_SIZE', 65536], ['IFLA_GRO_MAX_SIZE', 65536], ['UNKNOWN', {'header': {'length': 8, 'type': 63}}], ['UNKNOWN', {'header': {'length': 8, 'type': 64}}], ['IFLA_TSO_MAX_SIZE', 524280], ['IFLA_TSO_MAX_SEGS', 65535], ['UNKNOWN', {'header': {'length': 8, 'type': 66}}], ['IFLA_NUM_RX_QUEUES', 1], ['IFLA_CARRIER', 1], ['IFLA_CARRIER_CHANGES', 0], ['IFLA_CARRIER_UP_COUNT', 0], ['IFLA_CARRIER_DOWN_COUNT', 0], ['IFLA_PROTO_DOWN', 0], ['IFLA_ADDRESS', '00:00:00:00:00:00'], ['IFLA_BROADCAST', '00:00:00:00:00:00'], ['IFLA_STATS64', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_STATS', {'rx_packets': 1, 'tx_packets': 1, 'rx_bytes': 28, 'tx_bytes': 28, 'rx_errors': 0, 'tx_errors': 0, 'rx_dropped': 0, 'tx_dropped': 0, 'multicast': 0, 'collisions': 0, 'rx_length_errors': 0, 'rx_over_errors': 0, 'rx_crc_errors': 0, 'rx_frame_errors': 0, 'rx_fifo_errors': 0, 'rx_missed_errors': 0, 'tx_aborted_errors': 0, 'tx_carrier_errors': 0, 'tx_fifo_errors': 0, 'tx_heartbeat_errors': 0, 
'tx_window_errors': 0, 'rx_compressed': 0, 'tx_compressed': 0}], ['IFLA_XDP', {'attrs': [['IFLA_XDP_ATTACHED', None]]}], ['IFLA_QDISC', 'noqueue'], ['IFLA_AF_SPEC', {'attrs': [['AF_INET', {'dummy': 65668, 'forwarding': 1, 'mc_forwarding': 0, 'proxy_arp': 0, 'accept_redirects': 0, 'secure_redirects': 0, 'send_redirects': 0, 'shared_media': 1, 'rp_filter': 1, 'accept_source_route': 0, 'bootp_relay': 0, 'log_martians': 0, 'tag': 0, 'arpfilter': 0, 'medium_id': 0, 'noxfrm': 1, 'nopolicy': 1, 'force_igmp_version': 0, 'arp_announce': 0, 'arp_ignore': 0, 'promote_secondaries': 1, 'arp_accept': 0, 'arp_notify': 0, 'accept_local': 0, 'src_vmark': 0, 'proxy_arp_pvlan': 0, 'route_localnet': 0, 'igmpv2_unsolicited_report_interval': 10000, 'igmpv3_unsolicited_report_interval': 1000}], ['AF_INET6', {'attrs': [['IFLA_INET6_FLAGS', 2147483648], ['IFLA_INET6_CACHEINFO', {'max_reasm_len': 65535, 'tstamp': 1268500, 'reachable_time': 23353, 'retrans_time': 1000}], ['IFLA_INET6_CONF', {'forwarding': 0, 'hop_limit': 64, 'mtu': 65536, 'accept_ra': 1, 'accept_redirects': 1, 'autoconf': 1, 'dad_transmits': 1, 'router_solicitations': 4294967295, 'router_solicitation_interval': 4000, 'router_solicitation_delay': 1000, 'use_tempaddr': 4294967295, 'temp_valid_lft': 604800, 'temp_preferred_lft': 86400, 'regen_max_retry': 3, 'max_desync_factor': 600, 'max_addresses': 16, 'force_mld_version': 0, 'accept_ra_defrtr': 1, 'accept_ra_pinfo': 1, 'accept_ra_rtr_pref': 1, 'router_probe_interval': 60000, 'accept_ra_rt_info_max_plen': 0, 'proxy_ndp': 0, 'optimistic_dad': 0, 'accept_source_route': 0, 'mc_forwarding': 0, 'disable_ipv6': 0, 'accept_dad': 4294967295, 'force_tllao': 0, 'ndisc_notify': 0}], ['IFLA_INET6_STATS', {'num': 38, 'inpkts': 0, 'inoctets': 0, 'indelivers': 0, 'outforwdatagrams': 0, 'outpkts': 0, 'outoctets': 0, 'inhdrerrors': 0, 'intoobigerrors': 0, 'innoroutes': 0, 'inaddrerrors': 0, 'inunknownprotos': 0, 'intruncatedpkts': 0, 'indiscards': 0, 'outdiscards': 0, 'outnoroutes': 0, 
'reasmtimeout': 0, 'reasmreqds': 0, 'reasmoks': 0, 'reasmfails': 0, 'fragoks': 0, 'fragfails': 0, 'fragcreates': 0, 'inmcastpkts': 0, 'outmcastpkts': 0, 'inbcastpkts': 0, 'outbcastpkts': 0, 'inmcastoctets': 0, 'outmcastoctets': 0, 'inbcastoctets': 0, 'outbcastoctets': 0, 'csumerrors': 0, 'noectpkts': 0, 'ect1pkts': 0, 'ect0pkts': 0, 'cepkts': 0}], ['IFLA_INET6_ICMP6STATS', {'num': 7, 'inmsgs': 0, 'inerrors': 0, 'outmsgs': 0, 'outerrors': 0, 'csumerrors': 0}], ['IFLA_INET6_TOKEN', '::'], ['IFLA_INET6_ADDR_GEN_MODE', 0]]}]]}], ['IFLA_MAP', {'mem_start': 0, 'mem_end': 0, 'base_addr': 0, 'irq': 0, 'dma': 0, 'port': 0}], ['UNKNOWN', {'header': {'length': 4, 'type': 32830}}], ['UNKNOWN', {'header': {'length': 4, 'type': 32833}}]], 'header': {'length': 1404, 'type': 16, 'flags': 2, 'sequence_number': 255, 'pid': 267756, 'error': None, 'target': 'ovnmeta-b785f2d9-25d0-4cca-b23e-d2683fa2d149', 'stats': (0, 0, 0)}, 'state': 'up', 'event': 'RTM_NEWLINK'}]) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501[00m
Jan 30 20:20:21 compute-1 ovn_metadata_agent[105004]: 2026-01-30 20:20:21.765 105526 DEBUG neutron.privileged.agent.linux.ip_lib [-] Namespace ovnmeta-b785f2d9-25d0-4cca-b23e-d2683fa2d149 deleted. remove_netns /usr/lib/python3.9/site-packages/neutron/privileged/agent/linux/ip_lib.py:607[00m
Jan 30 20:20:21 compute-1 ovn_metadata_agent[105004]: 2026-01-30 20:20:21.765 105526 DEBUG oslo.privsep.daemon [-] privsep: reply[a50c1165-0c8e-4851-9f68-94246cab243c]: (4, None) _call_back /usr/lib/python3.9/site-packages/oslo_privsep/daemon.py:501[00m
Jan 30 20:20:40 compute-1 ovn_metadata_agent[105004]: 2026-01-30 20:20:40.443 105009 DEBUG ovsdbapp.backend.ovs_idl.event [-] Matched UPDATE: SbGlobalUpdateEvent(events=('update',), table='SB_Global', conditions=None, old_conditions=None), priority=20 to row=SB_Global(external_ids={}, nb_cfg=87, options={'arp_ns_explicit_output': 'true', 'mac_prefix': '3a:a7:05', 'max_tunid': '16711680', 'northd_internal_version': '24.03.8-20.33.0-76.8', 'svc_monitor_mac': 'c2:dd:3d:53:c2:99'}, ipsec=False) old=SB_Global(nb_cfg=86) matches /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/event.py:43[00m
Jan 30 20:20:40 compute-1 ovn_metadata_agent[105004]: 2026-01-30 20:20:40.446 105009 DEBUG neutron.agent.ovn.metadata.agent [-] Delaying updating chassis table for 10 seconds run /usr/lib/python3.9/site-packages/neutron/agent/ovn/metadata/agent.py:274[00m
Jan 30 20:20:50 compute-1 ovn_metadata_agent[105004]: 2026-01-30 20:20:50.450 105009 DEBUG ovsdbapp.backend.ovs_idl.transaction [-] Running txn n=1 command(idx=0): DbSetCommand(_result=None, table=Chassis_Private, record=964b3397-a93c-4a04-8bcb-7e022e0c00f7, col_values=(('external_ids', {'neutron:ovn-metadata-sb-cfg': '87'}),), if_exists=True) do_commit /usr/lib/python3.9/site-packages/ovsdbapp/backend/ovs_idl/transaction.py:89[00m
Jan 30 20:21:05 compute-1 ovn_metadata_agent[105004]: 2026-01-30 20:21:05.265 105009 DEBUG oslo_concurrency.lockutils [-] Acquiring lock "_check_child_processes" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:404[00m
Jan 30 20:21:05 compute-1 ovn_metadata_agent[105004]: 2026-01-30 20:21:05.266 105009 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" acquired by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: waited 0.000s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:409[00m
Jan 30 20:21:05 compute-1 ovn_metadata_agent[105004]: 2026-01-30 20:21:05.267 105009 DEBUG oslo_concurrency.lockutils [-] Lock "_check_child_processes" "released" by "neutron.agent.linux.external_process.ProcessMonitor._check_child_processes" :: held 0.001s inner /usr/lib/python3.9/site-packages/oslo_concurrency/lockutils.py:423[00m

● edpm_podman_exporter.service - podman_exporter container
     Loaded: loaded (/etc/systemd/system/edpm_podman_exporter.service; enabled; preset: disabled)
     Active: active (running) since Fri 2026-01-30 17:43:54 UTC; 2h 37min ago
   Main PID: 199283 (conmon)
         IO: 0B read, 127.0K written
      Tasks: 1 (limit: 100092)
     Memory: 688.0K (peak: 17.6M)
        CPU: 102ms
     CGroup: /system.slice/edpm_podman_exporter.service
             └─199283 /usr/bin/conmon --api-version 1 -c bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98 -u bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98/userdata -p /run/containers/storage/overlay-containers/bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98/userdata/pidfile -n podman_exporter --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98 --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98/userdata/oci-log --conmon-pidfile /run/podman_exporter.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 
--stopped-only --exit-command-arg bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98

Jan 30 17:43:54 compute-1 podman_exporter[199283]: ts=2026-01-30T17:43:54.577Z caller=exporter.go:68 level=info msg="Starting podman-prometheus-exporter" version="(version=1.10.1, branch=HEAD, revision=1)"
Jan 30 17:43:54 compute-1 podman_exporter[199283]: ts=2026-01-30T17:43:54.578Z caller=exporter.go:69 level=info msg=metrics enhanced=false
Jan 30 17:43:54 compute-1 podman_exporter[199283]: ts=2026-01-30T17:43:54.578Z caller=handler.go:94 level=info msg="enabled collectors"
Jan 30 17:43:54 compute-1 podman_exporter[199283]: ts=2026-01-30T17:43:54.578Z caller=handler.go:105 level=info collector=container
Jan 30 17:43:54 compute-1 podman[199267]: 2026-01-30 17:43:54.593301972 +0000 UTC m=+0.139922889 container start bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98 (image=quay.io/navidys/prometheus-podman-exporter:v1.10.1, name=podman_exporter, config_data={'command': ['--web.config.file=/etc/podman_exporter/podman_exporter.yaml'], 'environment': {'CONTAINER_HOST': 'unix:///run/podman/podman.sock', 'OS_ENDPOINT_TYPE': 'internal', 'EDPM_CONFIG_HASH': 'f3391df1932a31a4475305bf7ba459820c071ee6bc26dffcd59f1c41f8b252f0-1906ca889f747a7874c38f7dde1e751324374c17012dee147154da58b6efddd8'}, 'healthcheck': {'mount': '/var/lib/openstack/healthchecks/podman_exporter', 'test': '/openstack/healthcheck podman_exporter'}, 'image': 'quay.io/navidys/prometheus-podman-exporter:v1.10.1', 'net': 'host', 'ports': ['9882:9882'], 'privileged': True, 'recreate': True, 'restart': 'always', 'user': 'root', 'volumes': ['/var/lib/openstack/telemetry/podman_exporter.yaml:/etc/podman_exporter/podman_exporter.yaml:z', '/var/lib/openstack/certs/telemetry/default:/etc/podman_exporter/tls:z', '/run/podman/podman.sock:/run/podman/podman.sock:rw,z', '/var/lib/openstack/healthchecks/podman_exporter:/openstack:ro,z']}, config_id=podman_exporter, container_name=podman_exporter, maintainer=Navid Yaghoobi <navidys@fedoraproject.org>, managed_by=edpm_ansible)
Unit fcoe.service could not be found.
Unit hv_kvp_daemon.service could not be found.
Jan 30 17:43:54 compute-1 podman[199267]: podman_exporter
Jan 30 17:43:54 compute-1 systemd[1]: Started podman_exporter container.
Jan 30 17:43:54 compute-1 podman_exporter[199283]: ts=2026-01-30T17:43:54.646Z caller=exporter.go:96 level=info msg="Listening on" address=:9882
Jan 30 17:43:54 compute-1 podman_exporter[199283]: ts=2026-01-30T17:43:54.646Z caller=tls_config.go:313 level=info msg="Listening on" address=[::]:9882
Jan 30 17:43:54 compute-1 podman_exporter[199283]: ts=2026-01-30T17:43:54.647Z caller=tls_config.go:349 level=info msg="TLS is enabled." http2=true address=[::]:9882

○ emergency.service - Emergency Shell
     Loaded: loaded (/usr/lib/systemd/system/emergency.service; static)
     Active: inactive (dead)
       Docs: man:sulogin(8)

● getty@tty1.service - Getty on tty1
     Loaded: loaded (/usr/lib/systemd/system/getty@.service; enabled; preset: enabled)
     Active: active (running) since Fri 2026-01-30 16:48:43 UTC; 3h 32min ago
       Docs: man:agetty(8)
             man:systemd-getty-generator(8)
             http://0pointer.de/blog/projects/serial-console.html
   Main PID: 1011 (agetty)
         IO: 0B read, 0B written
      Tasks: 1 (limit: 100092)
     Memory: 276.0K (peak: 756.0K)
        CPU: 8ms
     CGroup: /system.slice/system-getty.slice/getty@tty1.service
             └─1011 /sbin/agetty -o "-p -- \\u" --noclear - linux

Jan 30 16:48:43 np0005602931.novalocal systemd[1]: Started Getty on tty1.

● gssproxy.service - GSSAPI Proxy Daemon
     Loaded: loaded (/usr/lib/systemd/system/gssproxy.service; disabled; preset: disabled)
     Active: active (running) since Fri 2026-01-30 16:48:37 UTC; 3h 32min ago
   Main PID: 879 (gssproxy)
         IO: 0B read, 0B written
      Tasks: 6 (limit: 100092)
     Memory: 1.8M (peak: 3.5M)
        CPU: 30ms
     CGroup: /system.slice/gssproxy.service
             └─879 /usr/sbin/gssproxy -D

Jan 30 16:48:37 np0005602931.novalocal systemd[1]: Starting GSSAPI Proxy Daemon...
Jan 30 16:48:37 np0005602931.novalocal systemd[1]: Started GSSAPI Proxy Daemon.

○ import-state.service - Import network configuration from initramfs
     Loaded: loaded (/usr/lib/systemd/system/import-state.service; enabled; preset: enabled)
     Active: inactive (dead)

○ initrd-cleanup.service - Cleaning Up and Shutting Down Daemons
     Loaded: loaded (/usr/lib/systemd/system/initrd-cleanup.service; static)
     Active: inactive (dead) since Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
   Main PID: 617 (code=exited, status=0/SUCCESS)
        CPU: 10ms

Jan 30 16:48:34 localhost systemd[1]: Starting Cleaning Up and Shutting Down Daemons...
Jan 30 16:48:34 localhost systemd[1]: initrd-cleanup.service: Deactivated successfully.
Jan 30 16:48:34 localhost systemd[1]: Finished Cleaning Up and Shutting Down Daemons.

○ initrd-parse-etc.service - Mountpoints Configured in the Real Root
     Loaded: loaded (/usr/lib/systemd/system/initrd-parse-etc.service; static)
     Active: inactive (dead) since Fri 2026-01-30 16:48:33 UTC; 3h 32min ago
   Main PID: 568 (code=exited, status=0/SUCCESS)
        CPU: 14ms

Jan 30 16:48:33 localhost systemd[1]: Starting Mountpoints Configured in the Real Root...
Jan 30 16:48:33 localhost systemd[1]: initrd-parse-etc.service: Deactivated successfully.
Jan 30 16:48:33 localhost systemd[1]: Finished Mountpoints Configured in the Real Root.

○ initrd-switch-root.service - Switch Root
     Loaded: loaded (/usr/lib/systemd/system/initrd-switch-root.service; static)
     Active: inactive (dead) since Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
   Main PID: 621 (code=exited, status=0/SUCCESS)
        CPU: 8ms

Jan 30 16:48:34 localhost systemd[1]: Starting Switch Root...

○ initrd-udevadm-cleanup-db.service - Cleanup udev Database
     Loaded: loaded (/usr/lib/systemd/system/initrd-udevadm-cleanup-db.service; static)
     Active: inactive (dead) since Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
   Main PID: 619 (code=exited, status=0/SUCCESS)
        CPU: 7ms

Jan 30 16:48:34 localhost systemd[1]: Starting Cleanup udev Database...
Jan 30 16:48:34 localhost systemd[1]: initrd-udevadm-cleanup-db.service: Deactivated successfully.
Jan 30 16:48:34 localhost systemd[1]: Finished Cleanup udev Database.

○ ip6tables.service - IPv6 firewall with ip6tables
     Loaded: loaded (/usr/lib/systemd/system/ip6tables.service; disabled; preset: disabled)
     Active: inactive (dead)

○ iptables.service - IPv4 firewall with iptables
     Loaded: loaded (/usr/lib/systemd/system/iptables.service; disabled; preset: disabled)
     Active: inactive (dead) since Fri 2026-01-30 17:31:22 UTC; 2h 50min ago
   Duration: 42min 45.377s
   Main PID: 784 (code=exited, status=0/SUCCESS)
        CPU: 70ms

Jan 30 16:48:36 localhost systemd[1]: Starting IPv4 firewall with iptables...
Jan 30 16:48:36 localhost iptables.init[784]: iptables: Applying firewall rules: [  OK  ]
Jan 30 16:48:36 localhost systemd[1]: Finished IPv4 firewall with iptables.
Jan 30 17:31:21 compute-1 systemd[1]: Stopping IPv4 firewall with iptables...
Jan 30 17:31:22 compute-1 iptables.init[69049]: iptables: Setting chains to policy ACCEPT: raw mangle filter nat [  OK  ]
Jan 30 17:31:22 compute-1 iptables.init[69049]: iptables: Flushing firewall rules: [  OK  ]
Jan 30 17:31:22 compute-1 systemd[1]: iptables.service: Deactivated successfully.
Jan 30 17:31:22 compute-1 systemd[1]: Stopped IPv4 firewall with iptables.

● irqbalance.service - irqbalance daemon
     Loaded: loaded (/usr/lib/systemd/system/irqbalance.service; enabled; preset: enabled)
     Active: active (running) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
       Docs: man:irqbalance(1)
             https://github.com/Irqbalance/irqbalance
   Main PID: 790 (irqbalance)
         IO: 0B read, 0B written
      Tasks: 2 (limit: 100092)
     Memory: 1.1M (peak: 1.6M)
        CPU: 877ms
     CGroup: /system.slice/irqbalance.service
             └─790 /usr/sbin/irqbalance

Jan 30 16:48:46 np0005602931.novalocal irqbalance[790]: Cannot change IRQ 32 affinity: Operation not permitted
Jan 30 16:48:46 np0005602931.novalocal irqbalance[790]: IRQ 32 affinity is now unmanaged
Jan 30 16:48:46 np0005602931.novalocal irqbalance[790]: Cannot change IRQ 30 affinity: Operation not permitted
Jan 30 16:48:46 np0005602931.novalocal irqbalance[790]: IRQ 30 affinity is now unmanaged
Jan 30 16:48:46 np0005602931.novalocal irqbalance[790]: Cannot change IRQ 29 affinity: Operation not permitted
Jan 30 16:48:46 np0005602931.novalocal irqbalance[790]: IRQ 29 affinity is now unmanaged
Jan 30 17:02:46 np0005602931.novalocal irqbalance[790]: Cannot change IRQ 27 affinity: Operation not permitted
Jan 30 17:02:46 np0005602931.novalocal irqbalance[790]: IRQ 27 affinity is now unmanaged
Jan 30 17:38:46 compute-1 irqbalance[790]: Cannot change IRQ 26 affinity: Operation not permitted
Jan 30 17:38:46 compute-1 irqbalance[790]: IRQ 26 affinity is now unmanaged

○ iscsi-init.service - One time configuration for iscsi.service
     Loaded: loaded (/usr/lib/systemd/system/iscsi-init.service; disabled; preset: disabled)
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 17:41:06 UTC; 2h 40min ago

Jan 30 17:40:32 compute-1 systemd[1]: One time configuration for iscsi.service was skipped because of an unmet condition check (ConditionPathExists=!/etc/iscsi/initiatorname.iscsi).
Jan 30 17:41:06 compute-1 systemd[1]: One time configuration for iscsi.service was skipped because of an unmet condition check (ConditionPathExists=!/etc/iscsi/initiatorname.iscsi).

○ iscsi-onboot.service - Special handling of early boot iSCSI sessions
     Loaded: loaded (/usr/lib/systemd/system/iscsi-onboot.service; enabled; preset: enabled)
     Active: inactive (dead)
       Docs: man:iscsiadm(8)
             man:iscsid(8)

● iscsi-shutdown.service - Logout off all iSCSI sessions on shutdown
     Loaded: loaded (/usr/lib/systemd/system/iscsi-shutdown.service; static)
     Active: active (exited) since Fri 2026-01-30 17:40:32 UTC; 2h 40min ago
       Docs: man:iscsid(8)
             man:iscsiadm(8)
   Main PID: 162642 (code=exited, status=0/SUCCESS)
        CPU: 1ms

Jan 30 17:40:32 compute-1 systemd[1]: Starting Logout off all iSCSI sessions on shutdown...
Jan 30 17:40:32 compute-1 systemd[1]: Finished Logout off all iSCSI sessions on shutdown.

○ iscsi-starter.service
     Loaded: loaded (/usr/lib/systemd/system/iscsi-starter.service; enabled; preset: disabled)
     Active: inactive (dead)

○ iscsi.service - Login and scanning of iSCSI devices
     Loaded: loaded (/usr/lib/systemd/system/iscsi.service; indirect; preset: enabled)
     Active: inactive (dead)
       Docs: man:iscsiadm(8)
             man:iscsid(8)

● iscsid.service - Open-iSCSI
     Loaded: loaded (/usr/lib/systemd/system/iscsid.service; enabled; preset: disabled)
     Active: active (running) since Fri 2026-01-30 17:41:06 UTC; 2h 40min ago
TriggeredBy: ● iscsid.socket
       Docs: man:iscsid(8)
             man:iscsiuio(8)
             man:iscsiadm(8)
   Main PID: 169133 (iscsid)
     Status: "Ready to process requests"
         IO: 0B read, 0B written
      Tasks: 1 (limit: 100092)
     Memory: 1.9M (peak: 2.0M)
        CPU: 5ms
     CGroup: /system.slice/iscsid.service
             └─169133 /usr/sbin/iscsid -f

Jan 30 17:41:06 compute-1 systemd[1]: Starting Open-iSCSI...
Jan 30 17:41:06 compute-1 systemd[1]: Started Open-iSCSI.

○ iscsiuio.service - iSCSI UserSpace I/O driver
     Loaded: loaded (/usr/lib/systemd/system/iscsiuio.service; disabled; preset: disabled)
     Active: inactive (dead)
TriggeredBy: ○ iscsiuio.socket
       Docs: man:iscsiuio(8)

● kdump.service - Crash recovery kernel arming
     Loaded: loaded (/usr/lib/systemd/system/kdump.service; enabled; preset: enabled)
     Active: active (exited) since Fri 2026-01-30 16:48:59 UTC; 3h 32min ago
   Main PID: 1009 (code=exited, status=0/SUCCESS)
        CPU: 15.628s

Jan 30 16:48:57 np0005602931.novalocal dracut[1292]: Linked:         0 files
Jan 30 16:48:57 np0005602931.novalocal dracut[1292]: Compared:       0 xattrs
Jan 30 16:48:57 np0005602931.novalocal dracut[1292]: Compared:       0 files
Jan 30 16:48:57 np0005602931.novalocal dracut[1292]: Saved:          0 B
Jan 30 16:48:57 np0005602931.novalocal dracut[1292]: Duration:       0.000517 seconds
Jan 30 16:48:57 np0005602931.novalocal dracut[1292]: *** Hardlinking files done ***
Jan 30 16:48:58 np0005602931.novalocal dracut[1292]: *** Creating initramfs image file '/boot/initramfs-5.14.0-665.el9.x86_64kdump.img' done ***
Jan 30 16:48:59 np0005602931.novalocal kdumpctl[1018]: kdump: kexec: loaded kdump kernel
Jan 30 16:48:59 np0005602931.novalocal kdumpctl[1018]: kdump: Starting kdump: [OK]
Jan 30 16:48:59 np0005602931.novalocal systemd[1]: Finished Crash recovery kernel arming.

● kmod-static-nodes.service - Create List of Static Device Nodes
     Loaded: loaded (/usr/lib/systemd/system/kmod-static-nodes.service; static)
     Active: active (exited) since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
   Main PID: 673 (code=exited, status=0/SUCCESS)
        CPU: 5ms

Jan 30 16:48:35 localhost systemd[1]: Finished Create List of Static Device Nodes.

● ldconfig.service - Rebuild Dynamic Linker Cache
     Loaded: loaded (/usr/lib/systemd/system/ldconfig.service; static)
     Active: active (exited) since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
       Docs: man:ldconfig(8)
   Main PID: 697 (code=exited, status=0/SUCCESS)
        CPU: 51ms

Jan 30 16:48:35 localhost systemd[1]: Starting Rebuild Dynamic Linker Cache...
Jan 30 16:48:35 localhost systemd[1]: Finished Rebuild Dynamic Linker Cache.

○ libvirtd.service
     Loaded: masked (Reason: Unit libvirtd.service is masked.)
     Active: inactive (dead)
TriggeredBy: ○ libvirtd.socket
             ○ libvirtd-ro.socket
             ○ libvirtd-admin.socket

○ loadmodules.service - Load legacy module configuration
     Loaded: loaded (/usr/lib/systemd/system/loadmodules.service; enabled; preset: enabled)
     Active: inactive (dead)

○ logrotate.service - Rotate log files
     Loaded: loaded (/usr/lib/systemd/system/logrotate.service; static)
Unit lvm2-activation-early.service could not be found.
     Active: inactive (dead)
TriggeredBy: ● logrotate.timer
       Docs: man:logrotate(8)
             man:logrotate.conf(5)

○ lvm2-lvmpolld.service - LVM2 poll daemon
     Loaded: loaded (/usr/lib/systemd/system/lvm2-lvmpolld.service; static)
     Active: inactive (dead)
TriggeredBy: ● lvm2-lvmpolld.socket
       Docs: man:lvmpolld(8)

● lvm2-monitor.service - Monitoring of LVM2 mirrors, snapshots etc. using dmeventd or progress polling
     Loaded: loaded (/usr/lib/systemd/system/lvm2-monitor.service; enabled; preset: enabled)
     Active: active (exited) since Fri 2026-01-30 17:24:27 UTC; 2h 56min ago
       Docs: man:dmeventd(8)
             man:lvcreate(8)
             man:lvchange(8)
             man:vgchange(8)
   Main PID: 34699 (code=exited, status=0/SUCCESS)
        CPU: 24ms

Jan 30 17:24:27 compute-1 systemd[1]: Starting Monitoring of LVM2 mirrors, snapshots etc. using dmeventd or progress polling...
Jan 30 17:24:27 compute-1 systemd[1]: Finished Monitoring of LVM2 mirrors, snapshots etc. using dmeventd or progress polling.

○ mdmonitor.service - Software RAID monitoring and management
     Loaded: loaded (/usr/lib/systemd/system/mdmonitor.service; enabled; preset: enabled)
     Active: inactive (dead)

○ microcode.service - Load CPU microcode update
     Loaded: loaded (/usr/lib/systemd/system/microcode.service; enabled; preset: enabled)
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 16:48:36 UTC; 3h 32min ago

Jan 30 16:48:36 localhost systemd[1]: Load CPU microcode update was skipped because of an unmet condition check (ConditionPathExists=/sys/devices/system/cpu/microcode/reload).

○ modprobe@configfs.service - Load Kernel Module configfs
     Loaded: loaded (/usr/lib/systemd/system/modprobe@.service; static)
     Active: inactive (dead) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
       Docs: man:modprobe(8)
   Main PID: 770 (code=exited, status=0/SUCCESS)
        CPU: 5ms

Jan 30 16:48:36 localhost systemd[1]: Starting Load Kernel Module configfs...
Jan 30 16:48:36 localhost systemd[1]: modprobe@configfs.service: Deactivated successfully.
Jan 30 16:48:36 localhost systemd[1]: Finished Load Kernel Module configfs.

○ modprobe@drm.service - Load Kernel Module drm
     Loaded: loaded (/usr/lib/systemd/system/modprobe@.service; static)
     Active: inactive (dead) since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
       Docs: man:modprobe(8)
   Main PID: 675 (code=exited, status=0/SUCCESS)
        CPU: 71ms

Jan 30 16:48:35 localhost systemd[1]: modprobe@drm.service: Deactivated successfully.
Jan 30 16:48:35 localhost systemd[1]: Finished Load Kernel Module drm.

○ modprobe@efi_pstore.service - Load Kernel Module efi_pstore
     Loaded: loaded (/usr/lib/systemd/system/modprobe@.service; static)
     Active: inactive (dead) since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
       Docs: man:modprobe(8)
   Main PID: 676 (code=exited, status=0/SUCCESS)
        CPU: 4ms

Jan 30 16:48:35 localhost systemd[1]: modprobe@efi_pstore.service: Deactivated successfully.
Jan 30 16:48:35 localhost systemd[1]: Finished Load Kernel Module efi_pstore.

○ modprobe@fuse.service - Load Kernel Module fuse
     Loaded: loaded (/usr/lib/systemd/system/modprobe@.service; static)
     Active: inactive (dead) since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
       Docs: man:modprobe(8)
   Main PID: 677 (code=exited, status=0/SUCCESS)
        CPU: 38ms

Jan 30 16:48:35 localhost systemd[1]: modprobe@fuse.service: Deactivated successfully.
Jan 30 16:48:35 localhost systemd[1]: Finished Load Kernel Module fuse.

● multipathd.service - Device-Mapper Multipath Device Controller
     Loaded: loaded (/usr/lib/systemd/system/multipathd.service; enabled; preset: enabled)
     Active: active (running) since Fri 2026-01-30 17:41:07 UTC; 2h 40min ago
TriggeredBy: ● multipathd.socket
   Main PID: 169292 (multipathd)
     Status: "up"
         IO: 0B read, 0B written
      Tasks: 7
     Memory: 18.4M (peak: 18.7M)
        CPU: 1.037s
     CGroup: /system.slice/multipathd.service
             └─169292 /sbin/multipathd -d -s

Jan 30 17:41:07 compute-1 systemd[1]: Starting Device-Mapper Multipath Device Controller...
Jan 30 17:41:07 compute-1 multipathd[169292]: --------start up--------
Jan 30 17:41:07 compute-1 multipathd[169292]: read /etc/multipath.conf
Jan 30 17:41:07 compute-1 multipathd[169292]: path checkers start up
Jan 30 17:41:07 compute-1 systemd[1]: Started Device-Mapper Multipath Device Controller.

○ netns-placeholder.service - Create netns directory
     Loaded: loaded (/etc/systemd/system/netns-placeholder.service; enabled; preset: enabled)
     Active: inactive (dead) since Fri 2026-01-30 17:34:47 UTC; 2h 46min ago
   Main PID: 102270 (code=exited, status=0/SUCCESS)
        CPU: 6ms

Jan 30 17:34:47 compute-1 systemd[1]: Starting Create netns directory...
Jan 30 17:34:47 compute-1 systemd[1]: netns-placeholder.service: Deactivated successfully.
Jan 30 17:34:47 compute-1 systemd[1]: Finished Create netns directory.

○ network.service - LSB: Bring up/down networking
     Loaded: loaded (/etc/rc.d/init.d/network; generated)
     Active: inactive (dead)
       Docs: man:systemd-sysv-generator(8)

● NetworkManager-wait-online.service - Network Manager Wait Online
     Loaded: loaded (/usr/lib/systemd/system/NetworkManager-wait-online.service; enabled; preset: disabled)
     Active: active (exited) since Fri 2026-01-30 17:29:21 UTC; 2h 52min ago
       Docs: man:NetworkManager-wait-online.service(8)
   Main PID: 55978 (code=exited, status=0/SUCCESS)
        CPU: 25ms

Jan 30 17:29:21 compute-1 systemd[1]: Starting Network Manager Wait Online...
Jan 30 17:29:21 compute-1 systemd[1]: Finished Network Manager Wait Online.

● NetworkManager.service - Network Manager
     Loaded: loaded (/usr/lib/systemd/system/NetworkManager.service; enabled; preset: enabled)
    Drop-In: /usr/lib/systemd/system/NetworkManager.service.d
             └─NetworkManager-ovs.conf
     Active: active (running) since Fri 2026-01-30 17:29:21 UTC; 2h 52min ago
       Docs: man:NetworkManager(8)
   Main PID: 55963 (NetworkManager)
         IO: 104.0K read, 261.5K written
      Tasks: 3 (limit: 100092)
     Memory: 5.9M (peak: 6.6M)
        CPU: 1min 23.257s
     CGroup: /system.slice/NetworkManager.service
             └─55963 /usr/sbin/NetworkManager --no-daemon

Jan 30 20:18:14 compute-1 NetworkManager[55963]: <info>  [1769804294.9044] manager: (tap33a85eb7-40): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/200)
Jan 30 20:18:26 compute-1 NetworkManager[55963]: <info>  [1769804306.9177] device (tapdc1fd9f5-c0): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')
Jan 30 20:19:53 compute-1 NetworkManager[55963]: <info>  [1769804393.4837] manager: (tape413b81f-0a): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/201)
Jan 30 20:19:55 compute-1 NetworkManager[55963]: <info>  [1769804395.5564] manager: (tape413b81f-0a): new Tun device (/org/freedesktop/NetworkManager/Devices/202)
Jan 30 20:19:55 compute-1 NetworkManager[55963]: <info>  [1769804395.6203] device (tape413b81f-0a): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')
Jan 30 20:19:55 compute-1 NetworkManager[55963]: <info>  [1769804395.6212] device (tape413b81f-0a): state change: unavailable -> disconnected (reason 'none', managed-type: 'external')
Jan 30 20:19:55 compute-1 NetworkManager[55963]: <info>  [1769804395.6666] manager: (tapb785f2d9-20): new Veth device (/org/freedesktop/NetworkManager/Devices/203)
Jan 30 20:19:55 compute-1 NetworkManager[55963]: <info>  [1769804395.7275] device (tapb785f2d9-20): carrier: link connected
Jan 30 20:19:55 compute-1 NetworkManager[55963]: <info>  [1769804395.9173] manager: (tapb785f2d9-20): new Open vSwitch Port device (/org/freedesktop/NetworkManager/Devices/204)
Jan 30 20:20:21 compute-1 NetworkManager[55963]: <info>  [1769804421.3738] device (tape413b81f-0a): state change: disconnected -> unmanaged (reason 'unmanaged', managed-type: 'removed')

○ nfs-idmapd.service - NFSv4 ID-name mapping service
Unit ntpd.service could not be found.
Unit ntpdate.service could not be found.
     Loaded: loaded (/usr/lib/systemd/system/nfs-idmapd.service; static)
     Active: inactive (dead)
       Docs: man:idmapd(8)

○ nfs-mountd.service - NFS Mount Daemon
     Loaded: loaded (/usr/lib/systemd/system/nfs-mountd.service; static)
     Active: inactive (dead)
       Docs: man:rpc.mountd(8)

○ nfs-server.service - NFS server and services
     Loaded: loaded (/usr/lib/systemd/system/nfs-server.service; disabled; preset: disabled)
     Active: inactive (dead)
       Docs: man:rpc.nfsd(8)
             man:exportfs(8)

○ nfs-utils.service - NFS server and client services
     Loaded: loaded (/usr/lib/systemd/system/nfs-utils.service; static)
     Active: inactive (dead)

○ nfsdcld.service - NFSv4 Client Tracking Daemon
     Loaded: loaded (/usr/lib/systemd/system/nfsdcld.service; static)
     Active: inactive (dead)
       Docs: man:nfsdcld(8)

● nftables.service - Netfilter Tables
     Loaded: loaded (/usr/lib/systemd/system/nftables.service; enabled; preset: disabled)
     Active: active (exited) since Fri 2026-01-30 17:31:23 UTC; 2h 50min ago
       Docs: man:nft(8)
   Main PID: 69439 (code=exited, status=0/SUCCESS)
        CPU: 11ms

Jan 30 17:31:23 compute-1 systemd[1]: Starting Netfilter Tables...
Jan 30 17:31:23 compute-1 systemd[1]: Finished Netfilter Tables.

● nis-domainname.service - Read and set NIS domainname from /etc/sysconfig/network
     Loaded: loaded (/usr/lib/systemd/system/nis-domainname.service; enabled; preset: enabled)
     Active: active (exited) since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
   Main PID: 678 (code=exited, status=0/SUCCESS)
        CPU: 10ms

Jan 30 16:48:35 localhost systemd[1]: Finished Read and set NIS domainname from /etc/sysconfig/network.

○ nvmefc-boot-connections.service - Auto-connect to subsystems on FC-NVME devices found during boot
     Loaded: loaded (/usr/lib/systemd/system/nvmefc-boot-connections.service; enabled; preset: enabled)
     Active: inactive (dead)

● openvswitch.service - Open vSwitch
     Loaded: loaded (/usr/lib/systemd/system/openvswitch.service; enabled; preset: disabled)
     Active: active (exited) since Fri 2026-01-30 17:29:07 UTC; 2h 52min ago
   Main PID: 54273 (code=exited, status=0/SUCCESS)
        CPU: 2ms

Jan 30 17:29:07 compute-1 systemd[1]: Starting Open vSwitch...
Jan 30 17:29:07 compute-1 systemd[1]: Finished Open vSwitch.

● ovs-delete-transient-ports.service - Open vSwitch Delete Transient Ports
     Loaded: loaded (/usr/lib/systemd/system/ovs-delete-transient-ports.service; static)
     Active: active (exited) since Fri 2026-01-30 17:29:07 UTC; 2h 52min ago
   Main PID: 54210 (code=exited, status=0/SUCCESS)
        CPU: 28ms

Jan 30 17:29:07 compute-1 systemd[1]: Starting Open vSwitch Delete Transient Ports...
Jan 30 17:29:07 compute-1 systemd[1]: Finished Open vSwitch Delete Transient Ports.

● ovs-vswitchd.service - Open vSwitch Forwarding Unit
     Loaded: loaded (/usr/lib/systemd/system/ovs-vswitchd.service; static)
     Active: active (running) since Fri 2026-01-30 17:29:07 UTC; 2h 52min ago
   Main PID: 54264 (ovs-vswitchd)
         IO: 3.4M read, 916.0K written
      Tasks: 13 (limit: 100092)
     Memory: 252.5M (peak: 255.3M)
        CPU: 43.608s
     CGroup: /system.slice/ovs-vswitchd.service
             └─54264 ovs-vswitchd unix:/var/run/openvswitch/db.sock -vconsole:emer -vsyslog:err -vfile:info --mlockall --user openvswitch:hugetlbfs --no-chdir --log-file=/var/log/openvswitch/ovs-vswitchd.log --pidfile=/var/run/openvswitch/ovs-vswitchd.pid --detach

Jan 30 17:29:07 compute-1 systemd[1]: Starting Open vSwitch Forwarding Unit...
Jan 30 17:29:07 compute-1 ovs-ctl[54254]: Inserting openvswitch module [  OK  ]
Jan 30 17:29:07 compute-1 ovs-ctl[54223]: Starting ovs-vswitchd [  OK  ]
Jan 30 17:29:07 compute-1 ovs-ctl[54223]: Enabling remote OVSDB managers [  OK  ]
Jan 30 17:29:07 compute-1 systemd[1]: Started Open vSwitch Forwarding Unit.
Jan 30 17:29:07 compute-1 ovs-vsctl[54271]: ovs|00001|vsctl|INFO|Called as ovs-vsctl --no-wait add Open_vSwitch . external-ids hostname=compute-1
Unit pacemaker.service could not be found.
Unit plymouth-quit-wait.service could not be found.
Unit plymouth-start.service could not be found.

● ovsdb-server.service - Open vSwitch Database Unit
     Loaded: loaded (/usr/lib/systemd/system/ovsdb-server.service; static)
     Active: active (running) since Fri 2026-01-30 17:29:07 UTC; 2h 52min ago
   Main PID: 54182 (ovsdb-server)
         IO: 1.2M read, 1.3M written
      Tasks: 1 (limit: 100092)
     Memory: 5.2M (peak: 41.6M)
        CPU: 37.416s
     CGroup: /system.slice/ovsdb-server.service
             └─54182 ovsdb-server /etc/openvswitch/conf.db -vconsole:emer -vsyslog:err -vfile:info --remote=punix:/var/run/openvswitch/db.sock --private-key=db:Open_vSwitch,SSL,private_key --certificate=db:Open_vSwitch,SSL,certificate --bootstrap-ca-cert=db:Open_vSwitch,SSL,ca_cert --user openvswitch:hugetlbfs --no-chdir --log-file=/var/log/openvswitch/ovsdb-server.log --pidfile=/var/run/openvswitch/ovsdb-server.pid --detach

Jan 30 17:29:07 compute-1 chown[54129]: /usr/bin/chown: cannot access '/run/openvswitch': No such file or directory
Jan 30 17:29:07 compute-1 ovs-ctl[54134]: /etc/openvswitch/conf.db does not exist ... (warning).
Jan 30 17:29:07 compute-1 ovs-ctl[54134]: Creating empty database /etc/openvswitch/conf.db [  OK  ]
Jan 30 17:29:07 compute-1 ovs-ctl[54134]: Starting ovsdb-server [  OK  ]
Jan 30 17:29:07 compute-1 ovs-vsctl[54183]: ovs|00001|vsctl|INFO|Called as ovs-vsctl --no-wait -- init -- set Open_vSwitch . db-version=8.5.1
Jan 30 17:29:07 compute-1 ovs-vsctl[54203]: ovs|00001|vsctl|INFO|Called as ovs-vsctl --no-wait set Open_vSwitch . ovs-version=3.3.5-115.el9s "external-ids:system-id=\"964b3397-a93c-4a04-8bcb-7e022e0c00f7\"" "external-ids:rundir=\"/var/run/openvswitch\"" "system-type=\"centos\"" "system-version=\"9\""
Jan 30 17:29:07 compute-1 ovs-ctl[54134]: Configuring Open vSwitch system IDs [  OK  ]
Jan 30 17:29:07 compute-1 ovs-vsctl[54209]: ovs|00001|vsctl|INFO|Called as ovs-vsctl --no-wait add Open_vSwitch . external-ids hostname=compute-1
Jan 30 17:29:07 compute-1 ovs-ctl[54134]: Enabling remote OVSDB managers [  OK  ]
Jan 30 17:29:07 compute-1 systemd[1]: Started Open vSwitch Database Unit.

● podman.service - Podman API Service
     Loaded: loaded (/usr/lib/systemd/system/podman.service; disabled; preset: disabled)
     Active: active (running) since Fri 2026-01-30 17:43:54 UTC; 2h 37min ago
TriggeredBy: ● podman.socket
       Docs: man:podman-system-service(1)
   Main PID: 199294 (podman)
         IO: 0B read, 0B written
      Tasks: 11 (limit: 100092)
     Memory: 21.7M (peak: 23.5M)
        CPU: 23.199s
     CGroup: /system.slice/podman.service
             └─199294 /usr/bin/podman --log-level=info system service

Jan 30 17:43:54 compute-1 podman[199294]: time="2026-01-30T17:43:54Z" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled"
Jan 30 17:43:54 compute-1 podman[199294]: time="2026-01-30T17:43:54Z" level=info msg="Using systemd socket activation to determine API endpoint"
Jan 30 17:43:54 compute-1 podman[199294]: time="2026-01-30T17:43:54Z" level=info msg="API service listening on \"/run/podman/podman.sock\". URI: \"unix:///run/podman/podman.sock\""
Jan 30 17:43:54 compute-1 podman[199294]: @ - - [30/Jan/2026:17:43:54 +0000] "GET /v4.9.3/libpod/_ping HTTP/1.1" 200 2 "" "Go-http-client/1.1"
Jan 30 17:43:54 compute-1 podman[199294]: time="2026-01-30T17:43:54Z" level=info msg="List containers: received `last` parameter - overwriting `limit`"
Jan 30 17:43:54 compute-1 podman[199294]: @ - - [30/Jan/2026:17:43:54 +0000] "GET /v4.9.3/libpod/containers/json?all=true&external=false&last=0&namespace=false&size=true&sync=false HTTP/1.1" 200 18075 "" "Go-http-client/1.1"
Jan 30 18:43:54 compute-1 podman[199294]: time="2026-01-30T18:43:54Z" level=info msg="List containers: received `last` parameter - overwriting `limit`"
Jan 30 18:43:54 compute-1 podman[199294]: @ - - [30/Jan/2026:18:43:54 +0000] "GET /v4.9.3/libpod/containers/json?all=true&external=false&last=0&namespace=false&size=true&sync=false HTTP/1.1" 200 26785 "" "Go-http-client/1.1"
Jan 30 19:43:54 compute-1 podman[199294]: time="2026-01-30T19:43:54Z" level=info msg="List containers: received `last` parameter - overwriting `limit`"
Unit power-profiles-daemon.service could not be found.
Unit rpc-svcgssd.service could not be found.
Jan 30 19:43:54 compute-1 podman[199294]: @ - - [30/Jan/2026:19:43:54 +0000] "GET /v4.9.3/libpod/containers/json?all=true&external=false&last=0&namespace=false&size=true&sync=false HTTP/1.1" 200 26784 "" "Go-http-client/1.1"

● polkit.service - Authorization Manager
     Loaded: loaded (/usr/lib/systemd/system/polkit.service; static)
     Active: active (running) since Fri 2026-01-30 17:26:22 UTC; 2h 55min ago
       Docs: man:polkit(8)
   Main PID: 44106 (polkitd)
         IO: 18.6M read, 0B written
      Tasks: 12 (limit: 100092)
     Memory: 24.5M (peak: 26.3M)
        CPU: 1.121s
     CGroup: /system.slice/polkit.service
             └─44106 /usr/lib/polkit-1/polkitd --no-debug

Jan 30 17:37:24 compute-1 polkitd[44106]: Reloading rules
Jan 30 17:37:24 compute-1 polkitd[44106]: Collecting garbage unconditionally...
Jan 30 17:37:24 compute-1 polkitd[44106]: Loading rules from directory /etc/polkit-1/rules.d
Jan 30 17:37:24 compute-1 polkitd[44106]: Loading rules from directory /usr/share/polkit-1/rules.d
Jan 30 17:37:24 compute-1 polkitd[44106]: Finished loading, compiling and executing 3 rules
Jan 30 17:37:24 compute-1 polkitd[44106]: Reloading rules
Jan 30 17:37:24 compute-1 polkitd[44106]: Collecting garbage unconditionally...
Jan 30 17:37:24 compute-1 polkitd[44106]: Loading rules from directory /etc/polkit-1/rules.d
Jan 30 17:37:24 compute-1 polkitd[44106]: Loading rules from directory /usr/share/polkit-1/rules.d
Jan 30 17:37:24 compute-1 polkitd[44106]: Finished loading, compiling and executing 3 rules

○ raid-check.service - RAID setup health check
     Loaded: loaded (/usr/lib/systemd/system/raid-check.service; static)
     Active: inactive (dead)
TriggeredBy: ○ raid-check.timer

○ rbdmap.service - Map RBD devices
     Loaded: loaded (/usr/lib/systemd/system/rbdmap.service; disabled; preset: disabled)
     Active: inactive (dead)

○ rc-local.service - /etc/rc.d/rc.local Compatibility
     Loaded: loaded (/usr/lib/systemd/system/rc-local.service; static)
     Active: inactive (dead)
       Docs: man:systemd-rc-local-generator(8)

○ rescue.service - Rescue Shell
     Loaded: loaded (/usr/lib/systemd/system/rescue.service; static)
     Active: inactive (dead)
       Docs: man:sulogin(8)

○ rpc-gssd.service - RPC security service for NFS client and server
     Loaded: loaded (/usr/lib/systemd/system/rpc-gssd.service; static)
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 16:48:37 UTC; 3h 32min ago
       Docs: man:rpc.gssd(8)

Jan 30 16:48:37 np0005602931.novalocal systemd[1]: RPC security service for NFS client and server was skipped because of an unmet condition check (ConditionPathExists=/etc/krb5.keytab).

● rpc-statd-notify.service - Notify NFS peers of a restart
     Loaded: loaded (/usr/lib/systemd/system/rpc-statd-notify.service; static)
     Active: active (exited) since Fri 2026-01-30 16:48:43 UTC; 3h 32min ago
       Docs: man:sm-notify(8)
             man:rpc.statd(8)
        CPU: 6ms

Jan 30 16:48:43 np0005602931.novalocal systemd[1]: Starting Notify NFS peers of a restart...
Jan 30 16:48:43 np0005602931.novalocal sm-notify[1005]: Version 2.5.4 starting
Jan 30 16:48:43 np0005602931.novalocal systemd[1]: Started Notify NFS peers of a restart.

○ rpc-statd.service - NFS status monitor for NFSv2/3 locking.
     Loaded: loaded (/usr/lib/systemd/system/rpc-statd.service; static)
     Active: inactive (dead)
       Docs: man:rpc.statd(8)

● rpcbind.service - RPC Bind
     Loaded: loaded (/usr/lib/systemd/system/rpcbind.service; enabled; preset: enabled)
     Active: active (running) since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
TriggeredBy: ● rpcbind.socket
       Docs: man:rpcbind(8)
   Main PID: 702 (rpcbind)
         IO: 0B read, 0B written
      Tasks: 1 (limit: 100092)
     Memory: 2.7M (peak: 3.0M)
        CPU: 55ms
     CGroup: /system.slice/rpcbind.service
             └─702 /usr/bin/rpcbind -w -f

Jan 30 16:48:35 localhost systemd[1]: Starting RPC Bind...
Jan 30 16:48:35 localhost systemd[1]: Started RPC Bind.

● rsyslog.service - System Logging Service
     Loaded: loaded (/usr/lib/systemd/system/rsyslog.service; enabled; preset: enabled)
     Active: active (running) since Fri 2026-01-30 16:48:44 UTC; 3h 32min ago
       Docs: man:rsyslogd(8)
             https://www.rsyslog.com/doc/
   Main PID: 1006 (rsyslogd)
         IO: 0B read, 56.3M written
      Tasks: 3 (limit: 100092)
     Memory: 56.7M (peak: 57.0M)
        CPU: 24.493s
     CGroup: /system.slice/rsyslog.service
             └─1006 /usr/sbin/rsyslogd -n

Jan 30 20:15:23 compute-1 rsyslogd[1006]: message too long (8192) with configured size 8096, begin of message is: 2026-01-30 20:15:23.701 12 ERROR oslo_messaging.notify.messaging [-] Could not s [v8.2510.0-2.el9 try https://www.rsyslog.com/e/2445 ]
Jan 30 20:15:23 compute-1 rsyslogd[1006]: message too long (8192) with configured size 8096, begin of message is: 2026-01-30 20:15:23.706 12 ERROR oslo_messaging.notify.messaging [-] Could not s [v8.2510.0-2.el9 try https://www.rsyslog.com/e/2445 ]
Jan 30 20:15:23 compute-1 rsyslogd[1006]: message too long (8192) with configured size 8096, begin of message is: 2026-01-30 20:15:23.711 12 ERROR oslo_messaging.notify.messaging [-] Could not s [v8.2510.0-2.el9 try https://www.rsyslog.com/e/2445 ]
Jan 30 20:15:23 compute-1 rsyslogd[1006]: message too long (8192) with configured size 8096, begin of message is: 2026-01-30 20:15:23.716 12 ERROR oslo_messaging.notify.messaging [-] Could not s [v8.2510.0-2.el9 try https://www.rsyslog.com/e/2445 ]
Jan 30 20:15:23 compute-1 rsyslogd[1006]: message too long (8192) with configured size 8096, begin of message is: 2026-01-30 20:15:23.792 12 ERROR oslo_messaging.notify.messaging [-] Could not s [v8.2510.0-2.el9 try https://www.rsyslog.com/e/2445 ]
Jan 30 20:15:23 compute-1 rsyslogd[1006]: message too long (8192) with configured size 8096, begin of message is: 2026-01-30 20:15:23.795 12 ERROR oslo_messaging.notify.messaging [-] Could not s [v8.2510.0-2.el9 try https://www.rsyslog.com/e/2445 ]
Jan 30 20:15:23 compute-1 rsyslogd[1006]: message too long (8192) with configured size 8096, begin of message is: 2026-01-30 20:15:23.799 12 ERROR oslo_messaging.notify.messaging [-] Could not s [v8.2510.0-2.el9 try https://www.rsyslog.com/e/2445 ]
Jan 30 20:15:23 compute-1 rsyslogd[1006]: message too long (8192) with configured size 8096, begin of message is: 2026-01-30 20:15:23.802 12 ERROR oslo_messaging.notify.messaging [-] Could not s [v8.2510.0-2.el9 try https://www.rsyslog.com/e/2445 ]
Jan 30 20:17:32 compute-1 rsyslogd[1006]: imjournal: journal files changed, reloading...  [v8.2510.0-2.el9 try https://www.rsyslog.com/e/0 ]
Jan 30 20:17:32 compute-1 rsyslogd[1006]: imjournal: journal files changed, reloading...  [v8.2510.0-2.el9 try https://www.rsyslog.com/e/0 ]

○ selinux-autorelabel-mark.service - Mark the need to relabel after reboot
     Loaded: loaded (/usr/lib/systemd/system/selinux-autorelabel-mark.service; enabled; preset: enabled)
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 16:48:35 UTC; 3h 32min ago

Jan 30 16:48:35 localhost systemd[1]: Mark the need to relabel after reboot was skipped because of an unmet condition check (ConditionSecurity=!selinux).

● serial-getty@ttyS0.service - Serial Getty on ttyS0
     Loaded: loaded (/usr/lib/systemd/system/serial-getty@.service; enabled-runtime; preset: disabled)
     Active: active (running) since Fri 2026-01-30 16:48:43 UTC; 3h 32min ago
       Docs: man:agetty(8)
             man:systemd-getty-generator(8)
             http://0pointer.de/blog/projects/serial-console.html
   Main PID: 1012 (agetty)
         IO: 0B read, 0B written
      Tasks: 1 (limit: 100092)
     Memory: 228.0K (peak: 484.0K)
        CPU: 7ms
     CGroup: /system.slice/system-serial\x2dgetty.slice/serial-getty@ttyS0.service
             └─1012 /sbin/agetty -o "-p -- \\u" --keep-baud 115200,57600,38400,9600 - vt220

Unit sntp.service could not be found.
Unit sshd-keygen.service could not be found.
Jan 30 16:48:43 np0005602931.novalocal systemd[1]: Started Serial Getty on ttyS0.

○ sshd-keygen@ecdsa.service - OpenSSH ecdsa Server Key Generation
     Loaded: loaded (/usr/lib/systemd/system/sshd-keygen@.service; disabled; preset: disabled)
    Drop-In: /usr/lib/systemd/system/sshd-keygen@.service.d
             └─disable-sshd-keygen-if-cloud-init-active.conf
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 17:37:28 UTC; 2h 43min ago

Jan 30 16:48:36 localhost systemd[1]: OpenSSH ecdsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target).
Jan 30 17:37:28 compute-1 systemd[1]: OpenSSH ecdsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target).

○ sshd-keygen@ed25519.service - OpenSSH ed25519 Server Key Generation
     Loaded: loaded (/usr/lib/systemd/system/sshd-keygen@.service; disabled; preset: disabled)
    Drop-In: /usr/lib/systemd/system/sshd-keygen@.service.d
             └─disable-sshd-keygen-if-cloud-init-active.conf
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 17:37:28 UTC; 2h 43min ago

Jan 30 16:48:36 localhost systemd[1]: OpenSSH ed25519 Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target).
Jan 30 17:37:28 compute-1 systemd[1]: OpenSSH ed25519 Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target).

○ sshd-keygen@rsa.service - OpenSSH rsa Server Key Generation
     Loaded: loaded (/usr/lib/systemd/system/sshd-keygen@.service; disabled; preset: disabled)
    Drop-In: /usr/lib/systemd/system/sshd-keygen@.service.d
             └─disable-sshd-keygen-if-cloud-init-active.conf
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 17:37:28 UTC; 2h 43min ago

Jan 30 16:48:36 localhost systemd[1]: OpenSSH rsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target).
Jan 30 17:37:28 compute-1 systemd[1]: OpenSSH rsa Server Key Generation was skipped because of an unmet condition check (ConditionPathExists=!/run/systemd/generator.early/multi-user.target.wants/cloud-init.target).

● sshd.service - OpenSSH server daemon
     Loaded: loaded (/usr/lib/systemd/system/sshd.service; enabled; preset: enabled)
     Active: active (running) since Fri 2026-01-30 17:37:28 UTC; 2h 43min ago
       Docs: man:sshd(8)
             man:sshd_config(5)
   Main PID: 129855 (sshd)
         IO: 0B read, 76.0K written
      Tasks: 1 (limit: 100092)
     Memory: 100.0M (peak: 105.7M)
        CPU: 9.429s
     CGroup: /system.slice/sshd.service
             └─129855 "sshd: /usr/sbin/sshd -D [listener] 0 of 10-100 startups"

Jan 30 20:18:52 compute-1 sshd-session[267110]: Received disconnect from 45.227.254.170 port 50420:11:  [preauth]
Jan 30 20:18:52 compute-1 sshd-session[267110]: Disconnected from authenticating user root 45.227.254.170 port 50420 [preauth]
Jan 30 20:20:56 compute-1 sshd-session[267972]: Accepted publickey for zuul from 192.168.122.10 port 48040 ssh2: ECDSA SHA256:Vsj85zhQ6wxGK/NQv0CKfB9EEFik+MjhKazPQpL0sM0
Jan 30 20:20:56 compute-1 sshd-session[267972]: pam_unix(sshd:session): session opened for user zuul(uid=1000) by zuul(uid=0)
Jan 30 20:21:06 compute-1 sshd-session[268742]: Accepted publickey for zuul from 38.102.83.246 port 49192 ssh2: ECDSA SHA256:Vsj85zhQ6wxGK/NQv0CKfB9EEFik+MjhKazPQpL0sM0
Jan 30 20:21:06 compute-1 sshd-session[268742]: pam_unix(sshd:session): session opened for user zuul(uid=1000) by zuul(uid=0)
Jan 30 20:21:06 compute-1 sshd-session[268790]: Accepted publickey for zuul from 38.102.83.246 port 49194 ssh2: ECDSA SHA256:Vsj85zhQ6wxGK/NQv0CKfB9EEFik+MjhKazPQpL0sM0
Unit syslog.service could not be found.
Jan 30 20:21:06 compute-1 sshd-session[268742]: pam_unix(sshd:session): session closed for user zuul
Jan 30 20:21:06 compute-1 sshd-session[268790]: pam_unix(sshd:session): session opened for user zuul(uid=1000) by zuul(uid=0)
Jan 30 20:21:06 compute-1 sshd-session[268790]: pam_unix(sshd:session): session closed for user zuul

○ sssd-kcm.service - SSSD Kerberos Cache Manager
     Loaded: loaded (/usr/lib/systemd/system/sssd-kcm.service; indirect; preset: disabled)
     Active: inactive (dead)
TriggeredBy: ● sssd-kcm.socket
       Docs: man:sssd-kcm(5)

○ sssd.service - System Security Services Daemon
     Loaded: loaded (/usr/lib/systemd/system/sssd.service; enabled; preset: enabled)
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 16:48:36 UTC; 3h 32min ago

Jan 30 16:48:36 localhost systemd[1]: System Security Services Daemon was skipped because no trigger condition checks were met.

○ sysstat-collect.service - system activity accounting tool
     Loaded: loaded (/usr/lib/systemd/system/sysstat-collect.service; static)
     Active: inactive (dead)
TriggeredBy: ○ sysstat-collect.timer
       Docs: man:sa1(8)

○ sysstat-summary.service - Generate a daily summary of process accounting
     Loaded: loaded (/usr/lib/systemd/system/sysstat-summary.service; static)
     Active: inactive (dead)
TriggeredBy: ○ sysstat-summary.timer
       Docs: man:sa2(8)

○ sysstat.service - Resets System Activity Logs
     Loaded: loaded (/usr/lib/systemd/system/sysstat.service; enabled; preset: enabled)
     Active: inactive (dead)

○ systemd-ask-password-console.service - Dispatch Password Requests to Console
     Loaded: loaded (/usr/lib/systemd/system/systemd-ask-password-console.service; static)
     Active: inactive (dead)
TriggeredBy: ● systemd-ask-password-console.path
       Docs: man:systemd-ask-password-console.service(8)

○ systemd-ask-password-wall.service - Forward Password Requests to Wall
     Loaded: loaded (/usr/lib/systemd/system/systemd-ask-password-wall.service; static)
     Active: inactive (dead)
TriggeredBy: ● systemd-ask-password-wall.path
       Docs: man:systemd-ask-password-wall.service(8)

○ systemd-binfmt.service - Set Up Additional Binary Formats
     Loaded: loaded (/usr/lib/systemd/system/systemd-binfmt.service; static)
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
       Docs: man:systemd-binfmt.service(8)
             man:binfmt.d(5)
             https://docs.kernel.org/admin-guide/binfmt-misc.html
             https://www.freedesktop.org/wiki/Software/systemd/APIFileSystems

Jan 30 16:48:35 localhost systemd[1]: Set Up Additional Binary Formats was skipped because no trigger condition checks were met.

○ systemd-boot-random-seed.service - Update Boot Loader Random Seed
     Loaded: loaded (/usr/lib/systemd/system/systemd-boot-random-seed.service; static)
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
       Docs: man:systemd-boot-random-seed.service(8)
             man:random(4)

Jan 30 16:48:35 localhost systemd[1]: Update Boot Loader Random Seed was skipped because no trigger condition checks were met.

● systemd-boot-update.service - Automatic Boot Loader Update
     Loaded: loaded (/usr/lib/systemd/system/systemd-boot-update.service; enabled; preset: enabled)
     Active: active (exited) since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
       Docs: man:bootctl(1)
   Main PID: 698 (code=exited, status=0/SUCCESS)
        CPU: 7ms

Jan 30 16:48:35 localhost systemd[1]: Starting Automatic Boot Loader Update...
Jan 30 16:48:35 localhost bootctl[698]: Couldn't find EFI system partition, skipping.
Jan 30 16:48:35 localhost systemd[1]: Finished Automatic Boot Loader Update.

○ systemd-firstboot.service - First Boot Wizard
     Loaded: loaded (/usr/lib/systemd/system/systemd-firstboot.service; static)
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
       Docs: man:systemd-firstboot(1)

Jan 30 16:48:35 localhost systemd[1]: First Boot Wizard was skipped because of an unmet condition check (ConditionFirstBoot=yes).

○ systemd-fsck-root.service - File System Check on Root Device
     Loaded: loaded (/usr/lib/systemd/system/systemd-fsck-root.service; static)
     Active: inactive (dead) since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
   Duration: 2.009s
       Docs: man:systemd-fsck-root.service(8)
   Main PID: 552 (code=exited, status=0/SUCCESS)
        CPU: 14ms

Jan 30 16:48:33 localhost systemd[1]: Starting File System Check on /dev/disk/by-uuid/822f14ea-6e7e-41df-b0d8-fbe282d9ded8...
Jan 30 16:48:33 localhost systemd-fsck[554]: /usr/sbin/fsck.xfs: XFS file system.
Jan 30 16:48:33 localhost systemd[1]: Finished File System Check on /dev/disk/by-uuid/822f14ea-6e7e-41df-b0d8-fbe282d9ded8.

● systemd-hostnamed.service - Hostname Service
     Loaded: loaded (/usr/lib/systemd/system/systemd-hostnamed.service; static)
    Drop-In: /usr/lib/systemd/system/systemd-hostnamed.service.d
             └─disable-privatedevices.conf
     Active: active (running) since Fri 2026-01-30 20:21:05 UTC; 19s ago
       Docs: man:systemd-hostnamed.service(8)
             man:hostname(5)
             man:machine-info(5)
             man:org.freedesktop.resolve1(5)
   Main PID: 268676 (systemd-hostnam)
         IO: 0B read, 0B written
      Tasks: 1 (limit: 100092)
     Memory: 2.7M (peak: 3.8M)
        CPU: 118ms
     CGroup: /system.slice/systemd-hostnamed.service
             └─268676 /usr/lib/systemd/systemd-hostnamed

Jan 30 20:21:05 compute-1 systemd[1]: Starting Hostname Service...
Jan 30 20:21:05 compute-1 systemd[1]: Started Hostname Service.

● systemd-hwdb-update.service - Rebuild Hardware Database
     Loaded: loaded (/usr/lib/systemd/system/systemd-hwdb-update.service; static)
     Active: active (exited) since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
       Docs: man:hwdb(7)
             man:systemd-hwdb(8)
   Main PID: 690 (code=exited, status=0/SUCCESS)
        CPU: 670ms

Jan 30 16:48:35 localhost systemd[1]: Starting Rebuild Hardware Database...
Jan 30 16:48:35 localhost systemd[1]: Finished Rebuild Hardware Database.

○ systemd-initctl.service - initctl Compatibility Daemon
     Loaded: loaded (/usr/lib/systemd/system/systemd-initctl.service; static)
     Active: inactive (dead)
TriggeredBy: ● systemd-initctl.socket
       Docs: man:systemd-initctl.service(8)

● systemd-journal-catalog-update.service - Rebuild Journal Catalog
     Loaded: loaded (/usr/lib/systemd/system/systemd-journal-catalog-update.service; static)
     Active: active (exited) since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
       Docs: man:systemd-journald.service(8)
             man:journald.conf(5)
   Main PID: 703 (code=exited, status=0/SUCCESS)
        CPU: 18ms

Jan 30 16:48:35 localhost systemd[1]: Starting Rebuild Journal Catalog...
Jan 30 16:48:35 localhost systemd[1]: Finished Rebuild Journal Catalog.

● systemd-journal-flush.service - Flush Journal to Persistent Storage
     Loaded: loaded (/usr/lib/systemd/system/systemd-journal-flush.service; static)
     Active: active (exited) since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
       Docs: man:systemd-journald.service(8)
             man:journald.conf(5)
   Main PID: 691 (code=exited, status=0/SUCCESS)
        CPU: 8ms

Jan 30 16:48:35 localhost systemd[1]: Starting Flush Journal to Persistent Storage...
Jan 30 16:48:35 localhost systemd[1]: Finished Flush Journal to Persistent Storage.

● systemd-journald.service - Journal Service
     Loaded: loaded (/usr/lib/systemd/system/systemd-journald.service; static)
     Active: active (running) since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
TriggeredBy: ● systemd-journald-dev-log.socket
             ● systemd-journald.socket
       Docs: man:systemd-journald.service(8)
             man:journald.conf(5)
   Main PID: 679 (systemd-journal)
     Status: "Processing requests..."
         IO: 0B read, 0B written
      Tasks: 1 (limit: 100092)
     Memory: 129.0M (peak: 134.0M)
        CPU: 19.078s
     CGroup: /system.slice/systemd-journald.service
             └─679 /usr/lib/systemd/systemd-journald

Jan 30 16:48:35 localhost systemd-journald[679]: Journal started
Jan 30 16:48:35 localhost systemd-journald[679]: Runtime Journal (/run/log/journal/bf0bc0bb03de29b24cba1cc9599cf5d0) is 8.0M, max 314.6M, 306.6M free.
Jan 30 16:48:34 localhost systemd[1]: systemd-journald.service: Deactivated successfully.
Jan 30 16:48:35 localhost systemd-journald[679]: Runtime Journal (/run/log/journal/bf0bc0bb03de29b24cba1cc9599cf5d0) is 8.0M, max 314.6M, 306.6M free.
Jan 30 16:48:35 localhost systemd-journald[679]: Received client request to flush runtime journal.

● systemd-logind.service - User Login Management
     Loaded: loaded (/usr/lib/systemd/system/systemd-logind.service; static)
    Drop-In: /usr/lib/systemd/system/systemd-logind.service.d
             └─10-grub2-logind-service.conf
     Active: active (running) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
       Docs: man:sd-login(3)
             man:systemd-logind.service(8)
             man:logind.conf(5)
             man:org.freedesktop.login1(5)
   Main PID: 808 (systemd-logind)
     Status: "Processing requests..."
         IO: 4.0K read, 0B written
      Tasks: 1 (limit: 100092)
     Memory: 6.7M (peak: 7.2M)
        CPU: 3.730s
     CGroup: /system.slice/systemd-logind.service
             └─808 /usr/lib/systemd/systemd-logind

Jan 30 20:18:46 compute-1 systemd-logind[808]: New session 161 of user zuul.
Jan 30 20:18:47 compute-1 systemd-logind[808]: Session 161 logged out. Waiting for processes to exit.
Jan 30 20:18:47 compute-1 systemd-logind[808]: Removed session 161.
Jan 30 20:20:56 compute-1 systemd-logind[808]: New session 162 of user zuul.
Jan 30 20:21:06 compute-1 systemd-logind[808]: New session 163 of user zuul.
Jan 30 20:21:06 compute-1 systemd-logind[808]: New session 164 of user zuul.
Jan 30 20:21:06 compute-1 systemd-logind[808]: Session 163 logged out. Waiting for processes to exit.
Jan 30 20:21:06 compute-1 systemd-logind[808]: Removed session 163.
Jan 30 20:21:06 compute-1 systemd-logind[808]: Session 164 logged out. Waiting for processes to exit.
Jan 30 20:21:06 compute-1 systemd-logind[808]: Removed session 164.

○ systemd-machine-id-commit.service - Commit a transient machine-id on disk
     Loaded: loaded (/usr/lib/systemd/system/systemd-machine-id-commit.service; static)
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
       Docs: man:systemd-machine-id-commit.service(8)

Jan 30 16:48:35 localhost systemd[1]: Commit a transient machine-id on disk was skipped because of an unmet condition check (ConditionPathIsMountPoint=/etc/machine-id).

● systemd-machined.service - Virtual Machine and Container Registration Service
     Loaded: loaded (/usr/lib/systemd/system/systemd-machined.service; static)
     Active: active (running) since Fri 2026-01-30 17:39:34 UTC; 2h 41min ago
       Docs: man:systemd-machined.service(8)
             man:org.freedesktop.machine1(5)
   Main PID: 154781 (systemd-machine)
     Status: "Processing requests..."
         IO: 0B read, 0B written
      Tasks: 1 (limit: 100092)
     Memory: 2.1M (peak: 2.7M)
        CPU: 2.684s
     CGroup: /system.slice/systemd-machined.service
             └─154781 /usr/lib/systemd/systemd-machined

Jan 30 20:05:03 compute-1 systemd-machined[154781]: Machine qemu-41-instance-00000043 terminated.
Jan 30 20:05:09 compute-1 systemd-machined[154781]: Machine qemu-40-instance-00000041 terminated.
Jan 30 20:05:20 compute-1 systemd-machined[154781]: New machine qemu-42-instance-00000044.
Jan 30 20:08:52 compute-1 systemd-machined[154781]: New machine qemu-43-instance-00000046.
Jan 30 20:15:27 compute-1 systemd-machined[154781]: Machine qemu-43-instance-00000046 terminated.
Jan 30 20:15:31 compute-1 systemd-machined[154781]: Machine qemu-42-instance-00000044 terminated.
Jan 30 20:18:14 compute-1 systemd-machined[154781]: New machine qemu-44-instance-00000047.
Unit systemd-networkd-wait-online.service could not be found.
Jan 30 20:18:27 compute-1 systemd-machined[154781]: Machine qemu-44-instance-00000047 terminated.
Jan 30 20:19:55 compute-1 systemd-machined[154781]: New machine qemu-45-instance-00000048.
Jan 30 20:20:21 compute-1 systemd-machined[154781]: Machine qemu-45-instance-00000048 terminated.

● systemd-modules-load.service - Load Kernel Modules
     Loaded: loaded (/usr/lib/systemd/system/systemd-modules-load.service; static)
     Active: active (exited) since Fri 2026-01-30 17:41:01 UTC; 2h 40min ago
       Docs: man:systemd-modules-load.service(8)
             man:modules-load.d(5)
   Main PID: 167515 (code=exited, status=0/SUCCESS)
        CPU: 10ms

Jan 30 17:41:01 compute-1 systemd[1]: Starting Load Kernel Modules...
Jan 30 17:41:01 compute-1 systemd[1]: Finished Load Kernel Modules.

● systemd-network-generator.service - Generate network units from Kernel command line
     Loaded: loaded (/usr/lib/systemd/system/systemd-network-generator.service; enabled; preset: enabled)
     Active: active (exited) since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
       Docs: man:systemd-network-generator.service(8)
   Main PID: 680 (code=exited, status=0/SUCCESS)
        CPU: 7ms

Jan 30 16:48:35 localhost systemd[1]: Finished Generate network units from Kernel command line.

○ systemd-pcrmachine.service - TPM2 PCR Machine ID Measurement
     Loaded: loaded (/usr/lib/systemd/system/systemd-pcrmachine.service; static)
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
       Docs: man:systemd-pcrmachine.service(8)

○ systemd-pcrphase-initrd.service - TPM2 PCR Barrier (initrd)
     Loaded: loaded (/usr/lib/systemd/system/systemd-pcrphase-initrd.service; static)
     Active: inactive (dead)
       Docs: man:systemd-pcrphase-initrd.service(8)

○ systemd-pcrphase-sysinit.service - TPM2 PCR Barrier (Initialization)
     Loaded: loaded (/usr/lib/systemd/system/systemd-pcrphase-sysinit.service; static)
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
       Docs: man:systemd-pcrphase-sysinit.service(8)

Jan 30 16:48:36 localhost systemd[1]: TPM2 PCR Barrier (Initialization) was skipped because of an unmet condition check (ConditionPathExists=/sys/firmware/efi/efivars/StubPcrKernelImage-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f).

○ systemd-pcrphase.service - TPM2 PCR Barrier (User)
     Loaded: loaded (/usr/lib/systemd/system/systemd-pcrphase.service; static)
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 16:48:37 UTC; 3h 32min ago
       Docs: man:systemd-pcrphase.service(8)

Jan 30 16:48:37 np0005602931.novalocal systemd[1]: TPM2 PCR Barrier (User) was skipped because of an unmet condition check (ConditionPathExists=/sys/firmware/efi/efivars/StubPcrKernelImage-4a67b082-0a4c-41cf-b6c7-440b29bb8c4f).

○ systemd-pstore.service - Platform Persistent Storage Archival
     Loaded: loaded (/usr/lib/systemd/system/systemd-pstore.service; enabled; preset: enabled)
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
       Docs: man:systemd-pstore(8)

Jan 30 16:48:35 localhost systemd[1]: Platform Persistent Storage Archival was skipped because of an unmet condition check (ConditionDirectoryNotEmpty=/sys/fs/pstore).

● systemd-random-seed.service - Load/Save OS Random Seed
     Loaded: loaded (/usr/lib/systemd/system/systemd-random-seed.service; static)
     Active: active (exited) since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
       Docs: man:systemd-random-seed.service(8)
             man:random(4)
   Main PID: 692 (code=exited, status=0/SUCCESS)
        CPU: 9ms

Jan 30 16:48:35 localhost systemd[1]: Starting Load/Save OS Random Seed...
Jan 30 16:48:35 localhost systemd[1]: Finished Load/Save OS Random Seed.

● systemd-remount-fs.service - Remount Root and Kernel File Systems
     Loaded: loaded (/usr/lib/systemd/system/systemd-remount-fs.service; enabled-runtime; preset: disabled)
     Active: active (exited) since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
       Docs: man:systemd-remount-fs.service(8)
Unit systemd-timesyncd.service could not be found.
Unit systemd-tmpfiles.service could not be found.
             https://www.freedesktop.org/wiki/Software/systemd/APIFileSystems
   Main PID: 681 (code=exited, status=0/SUCCESS)
        CPU: 13ms

Jan 30 16:48:35 localhost systemd[1]: Finished Remount Root and Kernel File Systems.

○ systemd-repart.service - Repartition Root Disk
     Loaded: loaded (/usr/lib/systemd/system/systemd-repart.service; static)
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
       Docs: man:systemd-repart.service(8)

○ systemd-rfkill.service - Load/Save RF Kill Switch Status
     Loaded: loaded (/usr/lib/systemd/system/systemd-rfkill.service; static)
     Active: inactive (dead)
TriggeredBy: ● systemd-rfkill.socket
       Docs: man:systemd-rfkill.service(8)

● systemd-sysctl.service - Apply Kernel Variables
     Loaded: loaded (/usr/lib/systemd/system/systemd-sysctl.service; static)
     Active: active (exited) since Fri 2026-01-30 17:26:33 UTC; 2h 54min ago
       Docs: man:systemd-sysctl.service(8)
             man:sysctl.d(5)
   Main PID: 45591 (code=exited, status=0/SUCCESS)
        CPU: 16ms

Jan 30 17:26:33 compute-1 systemd[1]: Starting Apply Kernel Variables...
Jan 30 17:26:33 compute-1 systemd[1]: Finished Apply Kernel Variables.

○ systemd-sysext.service - Merge System Extension Images into /usr/ and /opt/
     Loaded: loaded (/usr/lib/systemd/system/systemd-sysext.service; disabled; preset: disabled)
     Active: inactive (dead)
       Docs: man:systemd-sysext.service(8)

● systemd-sysusers.service - Create System Users
     Loaded: loaded (/usr/lib/systemd/system/systemd-sysusers.service; static)
     Active: active (exited) since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
       Docs: man:sysusers.d(5)
             man:systemd-sysusers.service(8)
   Main PID: 693 (code=exited, status=0/SUCCESS)
        CPU: 20ms

Jan 30 16:48:35 localhost systemd[1]: Starting Create System Users...
Jan 30 16:48:35 localhost systemd[1]: Finished Create System Users.

○ systemd-tmpfiles-clean.service - Cleanup of Temporary Directories
     Loaded: loaded (/usr/lib/systemd/system/systemd-tmpfiles-clean.service; static)
     Active: inactive (dead) since Fri 2026-01-30 17:03:38 UTC; 3h 17min ago
TriggeredBy: ● systemd-tmpfiles-clean.timer
       Docs: man:tmpfiles.d(5)
             man:systemd-tmpfiles(8)
   Main PID: 30335 (code=exited, status=0/SUCCESS)
        CPU: 40ms

Jan 30 17:03:38 compute-1 systemd[1]: Starting Cleanup of Temporary Directories...
Jan 30 17:03:38 compute-1 systemd[1]: systemd-tmpfiles-clean.service: Deactivated successfully.
Jan 30 17:03:38 compute-1 systemd[1]: Finished Cleanup of Temporary Directories.

● systemd-tmpfiles-setup-dev.service - Create Static Device Nodes in /dev
     Loaded: loaded (/usr/lib/systemd/system/systemd-tmpfiles-setup-dev.service; static)
     Active: active (exited) since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
       Docs: man:tmpfiles.d(5)
             man:systemd-tmpfiles(8)
   Main PID: 695 (code=exited, status=0/SUCCESS)
        CPU: 32ms

Jan 30 16:48:35 localhost systemd[1]: Starting Create Static Device Nodes in /dev...
Jan 30 16:48:35 localhost systemd[1]: Finished Create Static Device Nodes in /dev.

● systemd-tmpfiles-setup.service - Create Volatile Files and Directories
     Loaded: loaded (/usr/lib/systemd/system/systemd-tmpfiles-setup.service; static)
     Active: active (exited) since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
       Docs: man:tmpfiles.d(5)
             man:systemd-tmpfiles(8)
   Main PID: 699 (code=exited, status=0/SUCCESS)
        CPU: 65ms

Jan 30 16:48:35 localhost systemd[1]: Starting Create Volatile Files and Directories...
Jan 30 16:48:35 localhost systemd[1]: Finished Create Volatile Files and Directories.

● systemd-udev-settle.service - Wait for udev To Complete Device Initialization
     Loaded: loaded (/usr/lib/systemd/system/systemd-udev-settle.service; static)
     Active: active (exited) since Fri 2026-01-30 17:40:56 UTC; 2h 40min ago
       Docs: man:systemd-udev-settle.service(8)
   Main PID: 166609 (code=exited, status=0/SUCCESS)
        CPU: 11ms

Jan 30 17:40:56 compute-1 systemd[1]: Starting Wait for udev To Complete Device Initialization...
Jan 30 17:40:56 compute-1 udevadm[166609]: systemd-udev-settle.service is deprecated. Please fix multipathd.service not to pull it in.
Jan 30 17:40:56 compute-1 systemd[1]: Finished Wait for udev To Complete Device Initialization.

● systemd-udev-trigger.service - Coldplug All udev Devices
     Loaded: loaded (/usr/lib/systemd/system/systemd-udev-trigger.service; static)
    Drop-In: /usr/lib/systemd/system/systemd-udev-trigger.service.d
             └─systemd-udev-trigger-no-reload.conf
     Active: active (exited) since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
       Docs: man:udev(7)
             man:systemd-udevd.service(8)
   Main PID: 685 (code=exited, status=0/SUCCESS)
        CPU: 74ms

Jan 30 16:48:35 localhost systemd[1]: Finished Coldplug All udev Devices.

● systemd-udevd.service - Rule-based Manager for Device Events and Files
     Loaded: loaded (/usr/lib/systemd/system/systemd-udevd.service; static)
     Active: active (running) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
TriggeredBy: ● systemd-udevd-kernel.socket
             ● systemd-udevd-control.socket
       Docs: man:systemd-udevd.service(8)
             man:udev(7)
   Main PID: 732 (systemd-udevd)
     Status: "Processing with 32 children at max"
         IO: 2.2M read, 0B written
      Tasks: 1
     Memory: 22.6M (peak: 95.4M)
        CPU: 11.910s
     CGroup: /system.slice/systemd-udevd.service
             └─udev
               └─732 /usr/lib/systemd/systemd-udevd

Jan 30 19:48:28 compute-1 systemd-udevd[257363]: Network interface NamePolicy= disabled on kernel command line.
Jan 30 19:54:51 compute-1 systemd-udevd[259453]: Network interface NamePolicy= disabled on kernel command line.
Jan 30 19:54:51 compute-1 systemd-udevd[259456]: Network interface NamePolicy= disabled on kernel command line.
Jan 30 19:58:25 compute-1 systemd-udevd[260585]: Network interface NamePolicy= disabled on kernel command line.
Jan 30 20:05:20 compute-1 systemd-udevd[262700]: Network interface NamePolicy= disabled on kernel command line.
Jan 30 20:08:52 compute-1 systemd-udevd[263823]: Network interface NamePolicy= disabled on kernel command line.
Jan 30 20:15:27 compute-1 systemd-udevd[265651]: Network interface NamePolicy= disabled on kernel command line.
Jan 30 20:18:14 compute-1 systemd-udevd[266688]: Network interface NamePolicy= disabled on kernel command line.
Jan 30 20:18:14 compute-1 systemd-udevd[266692]: Network interface NamePolicy= disabled on kernel command line.
Jan 30 20:19:55 compute-1 systemd-udevd[267493]: Network interface NamePolicy= disabled on kernel command line.

● systemd-update-done.service - Update is Completed
     Loaded: loaded (/usr/lib/systemd/system/systemd-update-done.service; static)
     Active: active (exited) since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
       Docs: man:systemd-update-done.service(8)
   Main PID: 733 (code=exited, status=0/SUCCESS)
        CPU: 10ms

Jan 30 16:48:35 localhost systemd[1]: Starting Update is Completed...
Jan 30 16:48:35 localhost systemd[1]: Finished Update is Completed.

○ systemd-update-utmp-runlevel.service - Record Runlevel Change in UTMP
     Loaded: loaded (/usr/lib/systemd/system/systemd-update-utmp-runlevel.service; static)
     Active: inactive (dead) since Fri 2026-01-30 16:48:44 UTC; 3h 32min ago
       Docs: man:systemd-update-utmp-runlevel.service(8)
             man:utmp(5)
   Main PID: 1021 (code=exited, status=0/SUCCESS)
        CPU: 8ms

Jan 30 16:48:44 np0005602931.novalocal systemd[1]: Starting Record Runlevel Change in UTMP...
Jan 30 16:48:44 np0005602931.novalocal systemd[1]: systemd-update-utmp-runlevel.service: Deactivated successfully.
Jan 30 16:48:44 np0005602931.novalocal systemd[1]: Finished Record Runlevel Change in UTMP.

● systemd-update-utmp.service - Record System Boot/Shutdown in UTMP
     Loaded: loaded (/usr/lib/systemd/system/systemd-update-utmp.service; static)
Unit tlp.service could not be found.
     Active: active (exited) since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
       Docs: man:systemd-update-utmp.service(8)
             man:utmp(5)
   Main PID: 731 (code=exited, status=0/SUCCESS)
        CPU: 6ms

Jan 30 16:48:35 localhost systemd[1]: Starting Record System Boot/Shutdown in UTMP...
Jan 30 16:48:35 localhost systemd[1]: Finished Record System Boot/Shutdown in UTMP.

● systemd-user-sessions.service - Permit User Sessions
     Loaded: loaded (/usr/lib/systemd/system/systemd-user-sessions.service; static)
     Active: active (exited) since Fri 2026-01-30 16:48:43 UTC; 3h 32min ago
       Docs: man:systemd-user-sessions.service(8)
   Main PID: 1008 (code=exited, status=0/SUCCESS)
        CPU: 10ms

Jan 30 16:48:43 np0005602931.novalocal systemd[1]: Starting Permit User Sessions...
Jan 30 16:48:43 np0005602931.novalocal systemd[1]: Finished Permit User Sessions.

○ systemd-vconsole-setup.service - Setup Virtual Console
     Loaded: loaded (/usr/lib/systemd/system/systemd-vconsole-setup.service; static)
     Active: inactive (dead) since Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
   Duration: 1.973s
       Docs: man:systemd-vconsole-setup.service(8)
             man:vconsole.conf(5)
   Main PID: 315 (code=exited, status=0/SUCCESS)
        CPU: 176ms

Jan 30 16:48:32 localhost systemd[1]: Finished Setup Virtual Console.
Jan 30 16:48:34 localhost systemd[1]: systemd-vconsole-setup.service: Deactivated successfully.
Jan 30 16:48:34 localhost systemd[1]: Stopped Setup Virtual Console.
Notice: journal has been rotated since unit was started, output may be incomplete.

● tuned.service - Dynamic System Tuning Daemon
     Loaded: loaded (/usr/lib/systemd/system/tuned.service; enabled; preset: enabled)
     Active: active (running) since Fri 2026-01-30 17:26:23 UTC; 2h 55min ago
       Docs: man:tuned(8)
             man:tuned.conf(5)
             man:tuned-adm(8)
   Main PID: 44283 (tuned)
         IO: 0B read, 0B written
      Tasks: 4 (limit: 100092)
     Memory: 13.7M (peak: 16.1M)
        CPU: 3.479s
     CGroup: /system.slice/tuned.service
             └─44283 /usr/bin/python3 -Es /usr/sbin/tuned -l -P

Jan 30 17:26:23 compute-1 systemd[1]: Starting Dynamic System Tuning Daemon...
Jan 30 17:26:23 compute-1 systemd[1]: Started Dynamic System Tuning Daemon.

○ unbound-anchor.service - update of the root trust anchor for DNSSEC validation in unbound
     Loaded: loaded (/usr/lib/systemd/system/unbound-anchor.service; static)
     Active: inactive (dead)
TriggeredBy: ● unbound-anchor.timer
       Docs: man:unbound-anchor(8)

● user-runtime-dir@1000.service - User Runtime Directory /run/user/1000
     Loaded: loaded (/usr/lib/systemd/system/user-runtime-dir@.service; static)
     Active: active (exited) since Fri 2026-01-30 16:50:03 UTC; 3h 31min ago
       Docs: man:user@.service(5)
   Main PID: 4311 (code=exited, status=0/SUCCESS)
        CPU: 17ms

Jan 30 16:50:03 np0005602931.novalocal systemd[1]: Starting User Runtime Directory /run/user/1000...
Jan 30 16:50:03 np0005602931.novalocal systemd[1]: Finished User Runtime Directory /run/user/1000.

● user@1000.service - User Manager for UID 1000
     Loaded: loaded (/usr/lib/systemd/system/user@.service; static)
    Drop-In: /usr/lib/systemd/system/user@.service.d
             └─10-login-barrier.conf
     Active: active (running) since Fri 2026-01-30 16:50:03 UTC; 3h 31min ago
       Docs: man:user@.service(5)
   Main PID: 4312 (systemd)
     Status: "Ready."
         IO: 676.0K read, 4.0K written
      Tasks: 5
     Memory: 9.1M (peak: 13.4M)
        CPU: 1.881s
     CGroup: /user.slice/user-1000.slice/user@1000.service
             ├─app.slice
             │ └─dbus-broker.service
             │   ├─12376 /usr/bin/dbus-broker-launch --scope user
             │   └─12385 dbus-broker --log 4 --controller 9 --machine-id bf0bc0bb03de29b24cba1cc9599cf5d0 --max-bytes 100000000000000 --max-fds 25000000000000 --max-matches 5000000000
             ├─init.scope
             │ ├─4312 /usr/lib/systemd/systemd --user
             │ └─4314 "(sd-pam)"
             └─user.slice
               └─podman-pause-4a554b53.scope
                 └─12312 catatonit -P

Jan 30 17:02:38 np0005602931.novalocal dbus-broker-launch[12376]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored
Jan 30 17:02:38 np0005602931.novalocal dbus-broker-launch[12376]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored
Jan 30 17:02:38 np0005602931.novalocal systemd[4312]: Started D-Bus User Message Bus.
Jan 30 17:02:38 np0005602931.novalocal dbus-broker-lau[12376]: Ready
Jan 30 17:02:38 np0005602931.novalocal systemd[4312]: selinux: avc:  op=load_policy lsm=selinux seqno=4 res=1
Jan 30 17:02:38 np0005602931.novalocal systemd[4312]: Created slice Slice /user.
Jan 30 17:02:38 np0005602931.novalocal systemd[4312]: podman-12304.scope: unit configures an IP firewall, but not running as root.
Jan 30 17:02:38 np0005602931.novalocal systemd[4312]: (This warning is only shown for the first unit using IP firewalling.)
Jan 30 17:02:38 np0005602931.novalocal systemd[4312]: Started podman-12304.scope.
Jan 30 17:02:38 np0005602931.novalocal systemd[4312]: Started podman-pause-4a554b53.scope.

○ virtinterfaced.service - libvirt interface daemon
     Loaded: loaded (/usr/lib/systemd/system/virtinterfaced.service; disabled; preset: disabled)
     Active: inactive (dead)
TriggeredBy: ○ virtinterfaced.socket
             ○ virtinterfaced-ro.socket
             ○ virtinterfaced-admin.socket
       Docs: man:virtinterfaced(8)
             https://libvirt.org/

○ virtlockd.service - libvirt locking daemon
     Loaded: loaded (/usr/lib/systemd/system/virtlockd.service; disabled; preset: disabled)
     Active: inactive (dead)
TriggeredBy: ● virtlockd.socket
             ○ virtlockd-admin.socket
       Docs: man:virtlockd(8)
             https://libvirt.org/

● virtlogd.service - libvirt logging daemon
     Loaded: loaded (/usr/lib/systemd/system/virtlogd.service; enabled; preset: disabled)
     Active: active (running) since Fri 2026-01-30 17:39:30 UTC; 2h 41min ago
TriggeredBy: ● virtlogd.socket
             ● virtlogd-admin.socket
       Docs: man:virtlogd(8)
             https://libvirt.org/
   Main PID: 154150 (virtlogd)
         IO: 644.0K read, 61.8M written
      Tasks: 1 (limit: 100092)
     Memory: 4.0M (peak: 10.4M)
        CPU: 17min 39.653s
     CGroup: /system.slice/virtlogd.service
             └─154150 /usr/sbin/virtlogd

Jan 30 17:39:30 compute-1 systemd[1]: Starting libvirt logging daemon...
Jan 30 17:39:30 compute-1 systemd[1]: Started libvirt logging daemon.

○ virtnetworkd.service - libvirt network daemon
     Loaded: loaded (/usr/lib/systemd/system/virtnetworkd.service; disabled; preset: disabled)
     Active: inactive (dead)
TriggeredBy: ○ virtnetworkd.socket
             ○ virtnetworkd-ro.socket
             ○ virtnetworkd-admin.socket
       Docs: man:virtnetworkd(8)
             https://libvirt.org/

● virtnodedevd.service - libvirt nodedev daemon
     Loaded: loaded (/usr/lib/systemd/system/virtnodedevd.service; enabled; preset: disabled)
     Active: active (running) since Fri 2026-01-30 17:42:20 UTC; 2h 39min ago
TriggeredBy: ● virtnodedevd-ro.socket
             ● virtnodedevd-admin.socket
             ● virtnodedevd.socket
       Docs: man:virtnodedevd(8)
             https://libvirt.org/
   Main PID: 183389 (virtnodedevd)
         IO: 0B read, 0B written
      Tasks: 20 (limit: 100092)
     Memory: 6.0M (peak: 7.4M)
        CPU: 11.107s
     CGroup: /system.slice/virtnodedevd.service
             └─183389 /usr/sbin/virtnodedevd --timeout 120

Jan 30 20:15:27 compute-1 virtnodedevd[183389]: libvirt version: 11.10.0, package: 2.el9 (builder@centos.org, 2025-12-18-15:09:54, )
Jan 30 20:15:27 compute-1 virtnodedevd[183389]: hostname: compute-1
Jan 30 20:15:27 compute-1 virtnodedevd[183389]: ethtool ioctl error on tape32a12e3-0b: No such device
Jan 30 20:15:27 compute-1 virtnodedevd[183389]: ethtool ioctl error on tape32a12e3-0b: No such device
Jan 30 20:15:27 compute-1 virtnodedevd[183389]: ethtool ioctl error on tape32a12e3-0b: No such device
Jan 30 20:15:27 compute-1 virtnodedevd[183389]: ethtool ioctl error on tape32a12e3-0b: No such device
Jan 30 20:15:27 compute-1 virtnodedevd[183389]: ethtool ioctl error on tape32a12e3-0b: No such device
Jan 30 20:15:27 compute-1 virtnodedevd[183389]: ethtool ioctl error on tape32a12e3-0b: No such device
Jan 30 20:15:27 compute-1 virtnodedevd[183389]: ethtool ioctl error on tape32a12e3-0b: No such device
Jan 30 20:15:27 compute-1 virtnodedevd[183389]: ethtool ioctl error on tape32a12e3-0b: No such device

○ virtnwfilterd.service - libvirt nwfilter daemon
     Loaded: loaded (/usr/lib/systemd/system/virtnwfilterd.service; disabled; preset: disabled)
     Active: inactive (dead)
TriggeredBy: ○ virtnwfilterd-admin.socket
             ○ virtnwfilterd.socket
             ○ virtnwfilterd-ro.socket
       Docs: man:virtnwfilterd(8)
             https://libvirt.org/

○ virtproxyd.service - libvirt proxy daemon
     Loaded: loaded (/usr/lib/systemd/system/virtproxyd.service; enabled; preset: disabled)
     Active: inactive (dead) since Fri 2026-01-30 19:27:01 UTC; 54min ago
   Duration: 2min 2.491s
TriggeredBy: ● virtproxyd-admin.socket
             ● virtproxyd.socket
             ● virtproxyd-tls.socket
             ● virtproxyd-ro.socket
       Docs: man:virtproxyd(8)
             https://libvirt.org/
   Main PID: 247610 (code=exited, status=0/SUCCESS)
        CPU: 88ms

Jan 30 19:24:59 compute-1 systemd[1]: Starting libvirt proxy daemon...
Jan 30 19:24:59 compute-1 systemd[1]: Started libvirt proxy daemon.
Jan 30 19:27:01 compute-1 systemd[1]: virtproxyd.service: Deactivated successfully.

● virtqemud.service - libvirt QEMU daemon
     Loaded: loaded (/usr/lib/systemd/system/virtqemud.service; enabled; preset: enabled)
     Active: active (running) since Fri 2026-01-30 17:42:19 UTC; 2h 39min ago
TriggeredBy: ● virtqemud-ro.socket
             ● virtqemud.socket
             ● virtqemud-admin.socket
       Docs: man:virtqemud(8)
             https://libvirt.org/
   Main PID: 183098 (virtqemud)
         IO: 1.1M read, 1.5M written
      Tasks: 21 (limit: 32768)
     Memory: 27.4M (peak: 45.8M)
        CPU: 32.868s
     CGroup: /system.slice/virtqemud.service
             └─183098 /usr/sbin/virtqemud --timeout 120

Jan 30 17:42:19 compute-1 systemd[1]: Started libvirt QEMU daemon.
Jan 30 17:42:21 compute-1 virtqemud[183098]: libvirt version: 11.10.0, package: 2.el9 (builder@centos.org, 2025-12-18-15:09:54, )
Jan 30 17:42:21 compute-1 virtqemud[183098]: hostname: compute-1
Jan 30 17:42:21 compute-1 virtqemud[183098]: End of file while reading data: Input/output error
Jan 30 19:20:55 compute-1 virtqemud[183098]: Domain id=29 name='instance-0000002f' uuid=a8db9ac1-ed34-4d95-9157-6cda3227f928 is tainted: custom-monitor
Jan 30 19:25:05 compute-1 virtqemud[183098]: Domain id=31 name='instance-00000032' uuid=b673b737-e48b-4ee3-81ed-3bba10ce5563 is tainted: custom-monitor
Jan 30 20:21:02 compute-1 virtqemud[183098]: Failed to connect socket to '/var/run/libvirt/virtnetworkd-sock-ro': No such file or directory
Jan 30 20:21:02 compute-1 virtqemud[183098]: Failed to connect socket to '/var/run/libvirt/virtnwfilterd-sock-ro': No such file or directory
Jan 30 20:21:02 compute-1 virtqemud[183098]: Failed to connect socket to '/var/run/libvirt/virtstoraged-sock-ro': No such file or directory
Jan 30 20:21:23 compute-1 virtqemud[183098]: Failed to connect socket to '/var/run/libvirt/virtstoraged-sock-ro': No such file or directory

○ virtsecretd.service - libvirt secret daemon
     Loaded: loaded (/usr/lib/systemd/system/virtsecretd.service; enabled; preset: disabled)
     Active: inactive (dead) since Fri 2026-01-30 17:41:35 UTC; 2h 39min ago
   Duration: 2min 30ms
TriggeredBy: ● virtsecretd-admin.socket
             ● virtsecretd-ro.socket
             ● virtsecretd.socket
       Docs: man:virtsecretd(8)
             https://libvirt.org/
Unit ypbind.service could not be found.
Unit yppasswdd.service could not be found.
Unit ypserv.service could not be found.
Unit ypxfrd.service could not be found.
   Main PID: 154999 (code=exited, status=0/SUCCESS)
        CPU: 38ms

Jan 30 17:39:35 compute-1 systemd[1]: Starting libvirt secret daemon...
Jan 30 17:39:35 compute-1 systemd[1]: Started libvirt secret daemon.
Jan 30 17:41:35 compute-1 systemd[1]: virtsecretd.service: Deactivated successfully.

○ virtstoraged.service - libvirt storage daemon
     Loaded: loaded (/usr/lib/systemd/system/virtstoraged.service; disabled; preset: disabled)
     Active: inactive (dead)
TriggeredBy: ○ virtstoraged.socket
             ○ virtstoraged-ro.socket
             ○ virtstoraged-admin.socket
       Docs: man:virtstoraged(8)
             https://libvirt.org/

● -.slice - Root Slice
     Loaded: loaded
     Active: active since Fri 2026-01-30 16:48:31 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:31 UTC; 3h 32min ago
       Docs: man:systemd.special(7)
      Tasks: 540
     Memory: 3.2G
        CPU: 6h 40min 50.200s
     CGroup: /
             ├─271209 turbostat --debug sleep 10
             ├─271215 sleep 10
             ├─init.scope
             │ └─1 /usr/lib/systemd/systemd --switched-root --system --deserialize 31
             ├─machine.slice
             │ ├─libpod-407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a.scope
             │ │ └─container
             │ │   └─196153 /bin/node_exporter --web.config.file=/etc/node_exporter/node_exporter.yaml --web.disable-exporter-metrics --collector.systemd "--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service" --no-collector.dmi --no-collector.entropy --no-collector.thermal_zone --no-collector.time --no-collector.timex --no-collector.uname --no-collector.stat --no-collector.hwmon --no-collector.os --no-collector.selinux --no-collector.textfile --no-collector.powersupplyclass --no-collector.pressure --no-collector.rapl
             │ ├─libpod-455dca0f327e5b1da1cd1cb8c0cc19d961216e4814bf06622139b34f7df7859b.scope
             │ │ └─container
             │ │   ├─183473 dumb-init --single-child -- kolla_start
             │ │   ├─183475 /usr/bin/python3 /usr/bin/nova-compute
             │ │   ├─213354 /usr/bin/python3 /bin/privsep-helper --config-file /etc/nova/nova.conf --config-file /etc/nova/nova-compute.conf --config-dir /etc/nova/nova.conf.d --privsep_context nova.privsep.sys_admin_pctxt --privsep_sock_path /tmp/tmp33az_fjs/privsep.sock
             │ │   └─213418 /usr/bin/python3 /bin/privsep-helper --config-file /etc/nova/nova.conf --config-file /etc/nova/nova-compute.conf --config-dir /etc/nova/nova.conf.d --privsep_context vif_plug_ovs.privsep.vif_plug --privsep_sock_path /tmp/tmptmqu8bjz/privsep.sock
             │ ├─libpod-4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd.scope
             │ │ └─container
             │ │   ├─193155 dumb-init --single-child -- kolla_start
             │ │   ├─193158 "ceilometer-polling: master process [/usr/bin/ceilometer-polling --polling-namespaces compute --logfile /dev/stdout]"
             │ │   └─193336 "ceilometer-polling: AgentManager worker(0)"
             │ ├─libpod-834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20.scope
             │ │ └─container
             │ │   ├─105006 dumb-init --single-child -- kolla_start
             │ │   ├─105009 "neutron-ovn-metadata-agent (/usr/bin/python3 /usr/bin/neutron-ovn-metadata-agent)"
             │ │   ├─105396 "neutron-ovn-metadata-agent (/usr/bin/python3 /usr/bin/neutron-ovn-metadata-agent)"
             │ │   ├─105526 /usr/bin/python3 /bin/privsep-helper --config-file /etc/neutron/neutron.conf --config-dir /etc/neutron.conf.d --privsep_context neutron.privileged.namespace_cmd --privsep_sock_path /tmp/tmph8sg2l6q/privsep.sock
             │ │   ├─212908 /usr/bin/python3 /bin/privsep-helper --config-file /etc/neutron/neutron.conf --config-dir /etc/neutron.conf.d --privsep_context neutron.privileged.default --privsep_sock_path /tmp/tmpu4b226u9/privsep.sock
             │ │   └─213561 /usr/bin/python3 /bin/privsep-helper --config-file /etc/neutron/neutron.conf --config-dir /etc/neutron.conf.d --privsep_context neutron.privileged.link_cmd --privsep_sock_path /tmp/tmpg6ekdec0/privsep.sock
             │ ├─libpod-883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037.scope
             │ │ └─container
             │ │   ├─251554 dumb-init --single-child -- kolla_start
             │ │   └─251557 /usr/bin/ovn-controller --pidfile unix:/run/openvswitch/db.sock -p /etc/pki/tls/private/ovndb.key -c /etc/pki/tls/certs/ovndb.crt -C /etc/pki/tls/certs/ovndbca.crt
             │ ├─libpod-bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98.scope
             │ │ └─container
             │ │   └─199285 /bin/podman_exporter --web.config.file=/etc/podman_exporter/podman_exporter.yaml
             │ ├─libpod-cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18.scope
             │ │ └─container
             │ │   ├─216789 dumb-init --single-child -- /bin/bash -c "exec /usr/sbin/haproxy -Ws -f /var/lib/neutron/ovn-metadata-proxy/20be9363-8d31-4010-8379-2f4db75ec5ee.conf"
             │ │   ├─216791 /usr/sbin/haproxy -Ws -f /var/lib/neutron/ovn-metadata-proxy/20be9363-8d31-4010-8379-2f4db75ec5ee.conf
             │ │   └─216793 /usr/sbin/haproxy -Ws -f /var/lib/neutron/ovn-metadata-proxy/20be9363-8d31-4010-8379-2f4db75ec5ee.conf
             │ ├─libpod-conmon-cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18.scope
             │ │ └─216787 /usr/bin/conmon --api-version 1 -c cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18 -u cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18/userdata -p /run/containers/storage/overlay-containers/cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18/userdata/pidfile -n neutron-haproxy-ovnmeta-20be9363-8d31-4010-8379-2f4db75ec5ee --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18 --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18/userdata/oci-log --conmon-pidfile /run/containers/storage/overlay-containers/cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg 
overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18
             │ ├─libpod-conmon-ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab.scope
             │ │ └─216861 /usr/bin/conmon --api-version 1 -c ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab -u ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab/userdata -p /run/containers/storage/overlay-containers/ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab/userdata/pidfile -n neutron-haproxy-ovnmeta-311f9b45-49be-4345-9ac8-a1fe5a0b8a53 --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab/userdata/oci-log --conmon-pidfile /run/containers/storage/overlay-containers/ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg 
overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab
             │ ├─libpod-ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab.scope
             │ │ └─container
             │ │   ├─216863 dumb-init --single-child -- /bin/bash -c "exec /usr/sbin/haproxy -Ws -f /var/lib/neutron/ovn-metadata-proxy/311f9b45-49be-4345-9ac8-a1fe5a0b8a53.conf"
             │ │   ├─216866 /usr/sbin/haproxy -Ws -f /var/lib/neutron/ovn-metadata-proxy/311f9b45-49be-4345-9ac8-a1fe5a0b8a53.conf
             │ │   └─216868 /usr/sbin/haproxy -Ws -f /var/lib/neutron/ovn-metadata-proxy/311f9b45-49be-4345-9ac8-a1fe5a0b8a53.conf
             │ ├─libpod-e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a.scope
             │ │ └─container
             │ │   └─202410 /app/openstack-network-exporter
             │ ├─machine-qemu\x2d7\x2dinstance\x2d0000000e.scope
             │ │ └─libvirt
             │ │   └─216693 /usr/libexec/qemu-kvm -name guest=instance-0000000e,debug-threads=on -S -object "{\"qom-type\":\"secret\",\"id\":\"masterKey0\",\"format\":\"raw\",\"file\":\"/var/lib/libvirt/qemu/domain-7-instance-0000000e/master-key.aes\"}" -machine pc-q35-rhel9.8.0,usb=off,dump-guest-core=off,memory-backend=pc.ram,hpet=off,acpi=on -accel kvm -cpu EPYC-Rome,x2apic=on,tsc-deadline=on,hypervisor=on,tsc-adjust=on,spec-ctrl=on,stibp=on,ssbd=on,cmp-legacy=on,overflow-recov=on,succor=on,ibrs=on,amd-ssbd=on,virt-ssbd=on,lbrv=on,tsc-scale=on,vmcb-clean=on,flushbyasid=on,pause-filter=on,pfthreshold=on,svme-addr-chk=on,lfence-always-serializing=on,xsaves=off -m size=131072k -object "{\"qom-type\":\"memory-backend-ram\",\"id\":\"pc.ram\",\"size\":134217728}" -overcommit mem-lock=off -smp 1,sockets=1,dies=1,clusters=1,cores=1,threads=1 -uuid 79f0f99a-6f26-49ed-ac15-d149313db321 -smbios "type=1,manufacturer=RDO,product=OpenStack Compute,version=27.5.2-0.20260127144738.eaa65f0.el9,serial=79f0f99a-6f26-49ed-ac15-d149313db321,uuid=79f0f99a-6f26-49ed-ac15-d149313db321,family=Virtual Machine" -no-user-config -nodefaults -chardev socket,id=charmonitor,fd=28,server=on,wait=off -mon chardev=charmonitor,id=monitor,mode=control -rtc base=utc,driftfix=slew -global kvm-pit.lost_tick_policy=delay -no-shutdown -boot strict=on -device "{\"driver\":\"pcie-root-port\",\"port\":16,\"chassis\":1,\"id\":\"pci.1\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":17,\"chassis\":2,\"id\":\"pci.2\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":18,\"chassis\":3,\"id\":\"pci.3\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":19,\"chassis\":4,\"id\":\"pci.4\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":20,\"chassis\":5,\"id\":\"pci.5\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x4\"}" -device 
"{\"driver\":\"pcie-root-port\",\"port\":21,\"chassis\":6,\"id\":\"pci.6\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":22,\"chassis\":7,\"id\":\"pci.7\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":23,\"chassis\":8,\"id\":\"pci.8\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":24,\"chassis\":9,\"id\":\"pci.9\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":25,\"chassis\":10,\"id\":\"pci.10\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":26,\"chassis\":11,\"id\":\"pci.11\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":27,\"chassis\":12,\"id\":\"pci.12\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":28,\"chassis\":13,\"id\":\"pci.13\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":29,\"chassis\":14,\"id\":\"pci.14\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":30,\"chassis\":15,\"id\":\"pci.15\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":31,\"chassis\":16,\"id\":\"pci.16\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":32,\"chassis\":17,\"id\":\"pci.17\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":33,\"chassis\":18,\"id\":\"pci.18\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":34,\"chassis\":19,\"id\":\"pci.19\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":35,\"chassis\":20,\"id\":\"pci.20\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x3\"}" -device 
"{\"driver\":\"pcie-root-port\",\"port\":36,\"chassis\":21,\"id\":\"pci.21\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":37,\"chassis\":22,\"id\":\"pci.22\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":38,\"chassis\":23,\"id\":\"pci.23\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":39,\"chassis\":24,\"id\":\"pci.24\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":40,\"chassis\":25,\"id\":\"pci.25\",\"bus\":\"pcie.0\",\"addr\":\"0x5\"}" -device "{\"driver\":\"pcie-pci-bridge\",\"id\":\"pci.26\",\"bus\":\"pci.1\",\"addr\":\"0x0\"}" -device "{\"driver\":\"piix3-usb-uhci\",\"id\":\"usb\",\"bus\":\"pci.26\",\"addr\":\"0x1\"}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/_base/d8b1339cb6a58d9c630b446503c76b090a4f368e\",\"node-name\":\"libvirt-3-storage\",\"read-only\":true,\"cache\":{\"direct\":true,\"no-flush\":false}}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/79f0f99a-6f26-49ed-ac15-d149313db321/disk\",\"node-name\":\"libvirt-2-storage\",\"auto-read-only\":true,\"discard\":\"unmap\",\"cache\":{\"direct\":true,\"no-flush\":false}}" -blockdev "{\"node-name\":\"libvirt-2-format\",\"read-only\":false,\"cache\":{\"direct\":true,\"no-flush\":false},\"driver\":\"qcow2\",\"file\":\"libvirt-2-storage\",\"backing\":\"libvirt-3-storage\"}" -device "{\"driver\":\"virtio-blk-pci\",\"bus\":\"pci.4\",\"addr\":\"0x0\",\"drive\":\"libvirt-2-format\",\"id\":\"virtio-disk0\",\"bootindex\":1,\"write-cache\":\"on\"}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/79f0f99a-6f26-49ed-ac15-d149313db321/disk.config\",\"node-name\":\"libvirt-1-storage\",\"read-only\":true,\"cache\":{\"direct\":true,\"no-flush\":false}}" -device 
"{\"driver\":\"ide-cd\",\"bus\":\"ide.0\",\"drive\":\"libvirt-1-storage\",\"id\":\"sata0-0-0\",\"write-cache\":\"on\"}" -netdev "{\"type\":\"tap\",\"fd\":\"32\",\"vhost\":true,\"vhostfd\":\"37\",\"id\":\"hostnet0\"}" -device "{\"driver\":\"virtio-net-pci\",\"rx_queue_size\":512,\"host_mtu\":1342,\"netdev\":\"hostnet0\",\"id\":\"net0\",\"mac\":\"fa:16:3e:28:e9:1d\",\"bus\":\"pci.2\",\"addr\":\"0x0\"}" -netdev "{\"type\":\"tap\",\"fd\":\"38\",\"vhost\":true,\"vhostfd\":\"39\",\"id\":\"hostnet1\"}" -device "{\"driver\":\"virtio-net-pci\",\"rx_queue_size\":512,\"host_mtu\":1342,\"netdev\":\"hostnet1\",\"id\":\"net1\",\"mac\":\"fa:16:3e:0b:98:69\",\"bus\":\"pci.3\",\"addr\":\"0x0\"}" -add-fd set=0,fd=30,opaque=serial0-log -chardev pty,id=charserial0,logfile=/dev/fdset/0,logappend=on -device "{\"driver\":\"isa-serial\",\"chardev\":\"charserial0\",\"id\":\"serial0\",\"index\":0}" -device "{\"driver\":\"usb-tablet\",\"id\":\"input0\",\"bus\":\"usb.0\",\"port\":\"1\"}" -audiodev "{\"id\":\"audio1\",\"driver\":\"none\"}" -object "{\"qom-type\":\"tls-creds-x509\",\"id\":\"vnc-tls-creds0\",\"dir\":\"/etc/pki/qemu\",\"endpoint\":\"server\",\"verify-peer\":true}" -vnc "[::0]:0,tls-creds=vnc-tls-creds0,audiodev=audio1" -device "{\"driver\":\"virtio-vga\",\"id\":\"video0\",\"max_outputs\":1,\"bus\":\"pcie.0\",\"addr\":\"0x1\"}" -global ICH9-LPC.noreboot=off -watchdog-action reset -device "{\"driver\":\"virtio-balloon-pci\",\"id\":\"balloon0\",\"bus\":\"pci.5\",\"addr\":\"0x0\"}" -object "{\"qom-type\":\"rng-random\",\"id\":\"objrng0\",\"filename\":\"/dev/urandom\"}" -device "{\"driver\":\"virtio-rng-pci\",\"rng\":\"objrng0\",\"id\":\"rng0\",\"bus\":\"pci.6\",\"addr\":\"0x0\"}" -device "{\"driver\":\"vmcoreinfo\"}" -sandbox on,obsolete=deny,elevateprivileges=deny,spawn=deny,resourcecontrol=deny -msg timestamp=on
             │ └─machine-qemu\x2d8\x2dinstance\x2d00000010.scope
             │   └─libvirt
             │     └─217311 /usr/libexec/qemu-kvm -name guest=instance-00000010,debug-threads=on -S -object "{\"qom-type\":\"secret\",\"id\":\"masterKey0\",\"format\":\"raw\",\"file\":\"/var/lib/libvirt/qemu/domain-8-instance-00000010/master-key.aes\"}" -machine pc-q35-rhel9.8.0,usb=off,dump-guest-core=off,memory-backend=pc.ram,hpet=off,acpi=on -accel kvm -cpu EPYC-Rome,x2apic=on,tsc-deadline=on,hypervisor=on,tsc-adjust=on,spec-ctrl=on,stibp=on,ssbd=on,cmp-legacy=on,overflow-recov=on,succor=on,ibrs=on,amd-ssbd=on,virt-ssbd=on,lbrv=on,tsc-scale=on,vmcb-clean=on,flushbyasid=on,pause-filter=on,pfthreshold=on,svme-addr-chk=on,lfence-always-serializing=on,xsaves=off -m size=131072k -object "{\"qom-type\":\"memory-backend-ram\",\"id\":\"pc.ram\",\"size\":134217728}" -overcommit mem-lock=off -smp 1,sockets=1,dies=1,clusters=1,cores=1,threads=1 -uuid bdd94295-7a8b-44b3-91c8-846d36f784a0 -smbios "type=1,manufacturer=RDO,product=OpenStack Compute,version=27.5.2-0.20260127144738.eaa65f0.el9,serial=bdd94295-7a8b-44b3-91c8-846d36f784a0,uuid=bdd94295-7a8b-44b3-91c8-846d36f784a0,family=Virtual Machine" -no-user-config -nodefaults -chardev socket,id=charmonitor,fd=34,server=on,wait=off -mon chardev=charmonitor,id=monitor,mode=control -rtc base=utc,driftfix=slew -global kvm-pit.lost_tick_policy=delay -no-shutdown -boot strict=on -device "{\"driver\":\"pcie-root-port\",\"port\":16,\"chassis\":1,\"id\":\"pci.1\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":17,\"chassis\":2,\"id\":\"pci.2\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":18,\"chassis\":3,\"id\":\"pci.3\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":19,\"chassis\":4,\"id\":\"pci.4\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":20,\"chassis\":5,\"id\":\"pci.5\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x4\"}" -device 
"{\"driver\":\"pcie-root-port\",\"port\":21,\"chassis\":6,\"id\":\"pci.6\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":22,\"chassis\":7,\"id\":\"pci.7\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":23,\"chassis\":8,\"id\":\"pci.8\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":24,\"chassis\":9,\"id\":\"pci.9\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":25,\"chassis\":10,\"id\":\"pci.10\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":26,\"chassis\":11,\"id\":\"pci.11\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":27,\"chassis\":12,\"id\":\"pci.12\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":28,\"chassis\":13,\"id\":\"pci.13\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":29,\"chassis\":14,\"id\":\"pci.14\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":30,\"chassis\":15,\"id\":\"pci.15\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":31,\"chassis\":16,\"id\":\"pci.16\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":32,\"chassis\":17,\"id\":\"pci.17\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":33,\"chassis\":18,\"id\":\"pci.18\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":34,\"chassis\":19,\"id\":\"pci.19\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":35,\"chassis\":20,\"id\":\"pci.20\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x3\"}" -device 
"{\"driver\":\"pcie-root-port\",\"port\":36,\"chassis\":21,\"id\":\"pci.21\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":37,\"chassis\":22,\"id\":\"pci.22\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":38,\"chassis\":23,\"id\":\"pci.23\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":39,\"chassis\":24,\"id\":\"pci.24\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":40,\"chassis\":25,\"id\":\"pci.25\",\"bus\":\"pcie.0\",\"addr\":\"0x5\"}" -device "{\"driver\":\"pcie-pci-bridge\",\"id\":\"pci.26\",\"bus\":\"pci.1\",\"addr\":\"0x0\"}" -device "{\"driver\":\"piix3-usb-uhci\",\"id\":\"usb\",\"bus\":\"pci.26\",\"addr\":\"0x1\"}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/_base/d8b1339cb6a58d9c630b446503c76b090a4f368e\",\"node-name\":\"libvirt-3-storage\",\"read-only\":true,\"cache\":{\"direct\":true,\"no-flush\":false}}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/bdd94295-7a8b-44b3-91c8-846d36f784a0/disk\",\"node-name\":\"libvirt-2-storage\",\"auto-read-only\":true,\"discard\":\"unmap\",\"cache\":{\"direct\":true,\"no-flush\":false}}" -blockdev "{\"node-name\":\"libvirt-2-format\",\"read-only\":false,\"cache\":{\"direct\":true,\"no-flush\":false},\"driver\":\"qcow2\",\"file\":\"libvirt-2-storage\",\"backing\":\"libvirt-3-storage\"}" -device "{\"driver\":\"virtio-blk-pci\",\"bus\":\"pci.4\",\"addr\":\"0x0\",\"drive\":\"libvirt-2-format\",\"id\":\"virtio-disk0\",\"bootindex\":1,\"write-cache\":\"on\"}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/bdd94295-7a8b-44b3-91c8-846d36f784a0/disk.config\",\"node-name\":\"libvirt-1-storage\",\"read-only\":true,\"cache\":{\"direct\":true,\"no-flush\":false}}" -device 
"{\"driver\":\"ide-cd\",\"bus\":\"ide.0\",\"drive\":\"libvirt-1-storage\",\"id\":\"sata0-0-0\",\"write-cache\":\"on\"}" -netdev "{\"type\":\"tap\",\"fd\":\"39\",\"vhost\":true,\"vhostfd\":\"41\",\"id\":\"hostnet0\"}" -device "{\"driver\":\"virtio-net-pci\",\"rx_queue_size\":512,\"host_mtu\":1342,\"netdev\":\"hostnet0\",\"id\":\"net0\",\"mac\":\"fa:16:3e:16:12:2b\",\"bus\":\"pci.2\",\"addr\":\"0x0\"}" -netdev "{\"type\":\"tap\",\"fd\":\"42\",\"vhost\":true,\"vhostfd\":\"47\",\"id\":\"hostnet1\"}" -device "{\"driver\":\"virtio-net-pci\",\"rx_queue_size\":512,\"host_mtu\":1342,\"netdev\":\"hostnet1\",\"id\":\"net1\",\"mac\":\"fa:16:3e:9a:66:89\",\"bus\":\"pci.3\",\"addr\":\"0x0\"}" -add-fd set=0,fd=38,opaque=serial0-log -chardev pty,id=charserial0,logfile=/dev/fdset/0,logappend=on -device "{\"driver\":\"isa-serial\",\"chardev\":\"charserial0\",\"id\":\"serial0\",\"index\":0}" -device "{\"driver\":\"usb-tablet\",\"id\":\"input0\",\"bus\":\"usb.0\",\"port\":\"1\"}" -audiodev "{\"id\":\"audio1\",\"driver\":\"none\"}" -object "{\"qom-type\":\"tls-creds-x509\",\"id\":\"vnc-tls-creds0\",\"dir\":\"/etc/pki/qemu\",\"endpoint\":\"server\",\"verify-peer\":true}" -vnc "[::0]:1,tls-creds=vnc-tls-creds0,audiodev=audio1" -device "{\"driver\":\"virtio-vga\",\"id\":\"video0\",\"max_outputs\":1,\"bus\":\"pcie.0\",\"addr\":\"0x1\"}" -global ICH9-LPC.noreboot=off -watchdog-action reset -device "{\"driver\":\"virtio-balloon-pci\",\"id\":\"balloon0\",\"bus\":\"pci.5\",\"addr\":\"0x0\"}" -object "{\"qom-type\":\"rng-random\",\"id\":\"objrng0\",\"filename\":\"/dev/urandom\"}" -device "{\"driver\":\"virtio-rng-pci\",\"rng\":\"objrng0\",\"id\":\"rng0\",\"bus\":\"pci.6\",\"addr\":\"0x0\"}" -device "{\"driver\":\"vmcoreinfo\"}" -sandbox on,obsolete=deny,elevateprivileges=deny,spawn=deny,resourcecontrol=deny -msg timestamp=on
             ├─system.slice
             │ ├─NetworkManager.service
             │ │ └─55963 /usr/sbin/NetworkManager --no-daemon
             │ ├─auditd.service
             │ │ ├─704 /sbin/auditd
             │ │ └─706 /usr/sbin/sedispatch
             │ ├─chronyd.service
             │ │ └─65304 /usr/sbin/chronyd -F 2
             │ ├─crond.service
             │ │ └─1010 /usr/sbin/crond -n
             │ ├─dbus-broker.service
             │ │ ├─765 /usr/bin/dbus-broker-launch --scope system --audit
             │ │ └─776 dbus-broker --log 4 --controller 9 --machine-id bf0bc0bb03de29b24cba1cc9599cf5d0 --max-bytes 536870912 --max-fds 4096 --max-matches 131072 --audit
             │ ├─edpm_ceilometer_agent_compute.service
             │ │ └─193153 /usr/bin/conmon --api-version 1 -c 4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd -u 4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd/userdata -p /run/containers/storage/overlay-containers/4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd/userdata/pidfile -n ceilometer_agent_compute --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd/userdata/oci-log --conmon-pidfile /run/ceilometer_agent_compute.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup 
--exit-command-arg --stopped-only --exit-command-arg 4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd
             │ ├─edpm_node_exporter.service
             │ │ └─196151 /usr/bin/conmon --api-version 1 -c 407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a -u 407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a/userdata -p /run/containers/storage/overlay-containers/407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a/userdata/pidfile -n node_exporter --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a/userdata/oci-log --conmon-pidfile /run/node_exporter.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 
--stopped-only --exit-command-arg 407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a
             │ ├─edpm_nova_compute.service
             │ │ └─183471 /usr/bin/conmon --api-version 1 -c 455dca0f327e5b1da1cd1cb8c0cc19d961216e4814bf06622139b34f7df7859b -u 455dca0f327e5b1da1cd1cb8c0cc19d961216e4814bf06622139b34f7df7859b -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/455dca0f327e5b1da1cd1cb8c0cc19d961216e4814bf06622139b34f7df7859b/userdata -p /run/containers/storage/overlay-containers/455dca0f327e5b1da1cd1cb8c0cc19d961216e4814bf06622139b34f7df7859b/userdata/pidfile -n nova_compute --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/455dca0f327e5b1da1cd1cb8c0cc19d961216e4814bf06622139b34f7df7859b --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/455dca0f327e5b1da1cd1cb8c0cc19d961216e4814bf06622139b34f7df7859b/userdata/oci-log --conmon-pidfile /run/nova_compute.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 
--stopped-only --exit-command-arg 455dca0f327e5b1da1cd1cb8c0cc19d961216e4814bf06622139b34f7df7859b
             │ ├─edpm_openstack_network_exporter.service
             │ │ └─202408 /usr/bin/conmon --api-version 1 -c e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a -u e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a/userdata -p /run/containers/storage/overlay-containers/e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a/userdata/pidfile -n openstack_network_exporter --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a/userdata/oci-log --conmon-pidfile /run/openstack_network_exporter.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg 
cleanup --exit-command-arg --stopped-only --exit-command-arg e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a
             │ ├─edpm_ovn_controller.service
             │ │ └─251552 /usr/bin/conmon --api-version 1 -c 883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037 -u 883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037/userdata -p /run/containers/storage/overlay-containers/883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037/userdata/pidfile -n ovn_controller --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037 --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037/userdata/oci-log --conmon-pidfile /run/ovn_controller.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 
--stopped-only --exit-command-arg 883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037
             │ ├─edpm_ovn_metadata_agent.service
             │ │ └─105004 /usr/bin/conmon --api-version 1 -c 834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20 -u 834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20/userdata -p /run/containers/storage/overlay-containers/834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20/userdata/pidfile -n ovn_metadata_agent --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20 --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20/userdata/oci-log --conmon-pidfile /run/ovn_metadata_agent.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup 
--exit-command-arg --stopped-only --exit-command-arg 834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20
             │ ├─edpm_podman_exporter.service
             │ │ └─199283 /usr/bin/conmon --api-version 1 -c bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98 -u bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98/userdata -p /run/containers/storage/overlay-containers/bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98/userdata/pidfile -n podman_exporter --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98 --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98/userdata/oci-log --conmon-pidfile /run/podman_exporter.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 
--stopped-only --exit-command-arg bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98
             │ ├─gssproxy.service
             │ │ └─879 /usr/sbin/gssproxy -D
             │ ├─irqbalance.service
             │ │ └─790 /usr/sbin/irqbalance
             │ ├─iscsid.service
             │ │ └─169133 /usr/sbin/iscsid -f
             │ ├─multipathd.service
             │ │ └─169292 /sbin/multipathd -d -s
             │ ├─ovs-vswitchd.service
             │ │ └─54264 ovs-vswitchd unix:/var/run/openvswitch/db.sock -vconsole:emer -vsyslog:err -vfile:info --mlockall --user openvswitch:hugetlbfs --no-chdir --log-file=/var/log/openvswitch/ovs-vswitchd.log --pidfile=/var/run/openvswitch/ovs-vswitchd.pid --detach
             │ ├─ovsdb-server.service
             │ │ └─54182 ovsdb-server /etc/openvswitch/conf.db -vconsole:emer -vsyslog:err -vfile:info --remote=punix:/var/run/openvswitch/db.sock --private-key=db:Open_vSwitch,SSL,private_key --certificate=db:Open_vSwitch,SSL,certificate --bootstrap-ca-cert=db:Open_vSwitch,SSL,ca_cert --user openvswitch:hugetlbfs --no-chdir --log-file=/var/log/openvswitch/ovsdb-server.log --pidfile=/var/run/openvswitch/ovsdb-server.pid --detach
             │ ├─podman.service
             │ │ └─199294 /usr/bin/podman --log-level=info system service
             │ ├─polkit.service
             │ │ └─44106 /usr/lib/polkit-1/polkitd --no-debug
             │ ├─rpcbind.service
             │ │ └─702 /usr/bin/rpcbind -w -f
             │ ├─rsyslog.service
             │ │ └─1006 /usr/sbin/rsyslogd -n
             │ ├─sshd.service
             │ │ └─129855 "sshd: /usr/sbin/sshd -D [listener] 0 of 10-100 startups"
             │ ├─system-getty.slice
             │ │ └─getty@tty1.service
             │ │   └─1011 /sbin/agetty -o "-p -- \\u" --noclear - linux
             │ ├─system-serial\x2dgetty.slice
             │ │ └─serial-getty@ttyS0.service
             │ │   └─1012 /sbin/agetty -o "-p -- \\u" --keep-baud 115200,57600,38400,9600 - vt220
             │ ├─systemd-hostnamed.service
             │ │ └─268676 /usr/lib/systemd/systemd-hostnamed
             │ ├─systemd-journald.service
             │ │ └─679 /usr/lib/systemd/systemd-journald
             │ ├─systemd-logind.service
             │ │ └─808 /usr/lib/systemd/systemd-logind
             │ ├─systemd-machined.service
             │ │ └─154781 /usr/lib/systemd/systemd-machined
             │ ├─systemd-udevd.service
             │ │ └─udev
             │ │   └─732 /usr/lib/systemd/systemd-udevd
             │ ├─tuned.service
             │ │ └─44283 /usr/bin/python3 -Es /usr/sbin/tuned -l -P
             │ ├─virtlogd.service
             │ │ └─154150 /usr/sbin/virtlogd
             │ ├─virtnodedevd.service
             │ │ └─183389 /usr/sbin/virtnodedevd --timeout 120
             │ └─virtqemud.service
             │   └─183098 /usr/sbin/virtqemud --timeout 120
             └─user.slice
               └─user-1000.slice
                 ├─session-1.scope
                 │ └─4523 /usr/bin/python3
                 ├─session-100.scope
                 │ ├─247036 "sshd-session: zuul [priv]"
                 │ └─247039 "sshd-session: zuul@notty"
                 ├─session-102.scope
                 │ ├─247152 "sshd-session: zuul [priv]"
                 │ └─247155 "sshd-session: zuul@notty"
                 ├─session-105.scope
                 │ ├─247407 "sshd-session: zuul [priv]"
                 │ └─247410 "sshd-session: zuul@notty"
                 ├─session-107.scope
                 │ ├─247511 "sshd-session: zuul [priv]"
                 │ └─247514 "sshd-session: zuul@notty"
                 ├─session-108.scope
                 │ ├─247824 "sshd-session: zuul [priv]"
                 │ └─247827 "sshd-session: zuul@notty"
                 ├─session-110.scope
                 │ ├─247944 "sshd-session: zuul [priv]"
                 │ └─247977 "sshd-session: zuul@notty"
                 ├─session-111.scope
                 │ ├─248004 "sshd-session: zuul [priv]"
                 │ └─248007 "sshd-session: zuul@notty"
                 ├─session-113.scope
                 │ ├─248105 "sshd-session: zuul [priv]"
                 │ └─248108 "sshd-session: zuul@notty"
                 ├─session-120.scope
                 │ ├─248982 "sshd-session: zuul [priv]"
                 │ └─248985 "sshd-session: zuul@notty"
                 ├─session-122.scope
                 │ ├─249152 "sshd-session: zuul [priv]"
                 │ └─249196 "sshd-session: zuul@notty"
                 ├─session-124.scope
                 │ ├─249529 "sshd-session: zuul [priv]"
                 │ └─249532 "sshd-session: zuul@notty"
                 ├─session-126.scope
                 │ ├─249678 "sshd-session: zuul [priv]"
                 │ └─249681 "sshd-session: zuul@notty"
                 ├─session-128.scope
                 │ ├─249968 "sshd-session: zuul [priv]"
                 │ └─249971 "sshd-session: zuul@notty"
                 ├─session-130.scope
                 │ ├─250070 "sshd-session: zuul [priv]"
                 │ └─250073 "sshd-session: zuul@notty"
                 ├─session-131.scope
                 │ ├─250101 "sshd-session: zuul [priv]"
                 │ └─250104 "sshd-session: zuul@notty"
                 ├─session-133.scope
                 │ ├─250202 "sshd-session: zuul [priv]"
                 │ └─250205 "sshd-session: zuul@notty"
                 ├─session-134.scope
                 │ ├─250290 "sshd-session: zuul [priv]"
                 │ └─250312 "sshd-session: zuul@notty"
                 ├─session-136.scope
                 │ ├─250434 "sshd-session: zuul [priv]"
                 │ └─250449 "sshd-session: zuul@notty"
                 ├─session-162.scope
                 │ ├─267972 "sshd-session: zuul [priv]"
                 │ ├─267975 "sshd-session: zuul@notty"
                 │ ├─267976 sudo bash -c "rm -rf /var/tmp/sos-osp && mkdir /var/tmp/sos-osp && sos report --batch --all-logs --tmp-dir=/var/tmp/sos-osp  -p container,openstack_edpm,system,storage,virt"
                 │ ├─268000 /usr/bin/python3 -s /sbin/sos report --batch --all-logs --tmp-dir=/var/tmp/sos-osp -p container,openstack_edpm,system,storage,virt
                 │ ├─271208 timeout 15s turbostat --debug sleep 10
                 │ ├─272225 timeout 300s systemctl status --all
                 │ ├─272226 systemctl status --all
                 │ └─272355 /usr/bin/python3 -s /sbin/sos report --batch --all-logs --tmp-dir=/var/tmp/sos-osp -p container,openstack_edpm,system,storage,virt
                 ├─session-48.scope
                 │ ├─242639 "sshd-session: zuul [priv]"
                 │ └─242642 "sshd-session: zuul@notty"
                 ├─session-50.scope
                 │ ├─242795 "sshd-session: zuul [priv]"
                 │ └─242798 "sshd-session: zuul@notty"
                 ├─session-51.scope
                 │ ├─242870 "sshd-session: zuul [priv]"
                 │ └─242873 "sshd-session: zuul@notty"
                 ├─session-53.scope
                 │ ├─242971 "sshd-session: zuul [priv]"
                 │ └─242974 "sshd-session: zuul@notty"
                 ├─session-54.scope
                 │ ├─243049 "sshd-session: zuul [priv]"
                 │ └─243052 "sshd-session: zuul@notty"
                 ├─session-56.scope
                 │ ├─243121 "sshd-session: zuul [priv]"
                 │ └─243131 "sshd-session: zuul@notty"
                 ├─session-57.scope
                 │ ├─243214 "sshd-session: zuul [priv]"
                 │ └─243217 "sshd-session: zuul@notty"
                 ├─session-59.scope
                 │ ├─243317 "sshd-session: zuul [priv]"
                 │ └─243320 "sshd-session: zuul@notty"
                 ├─session-60.scope
                 │ ├─243597 "sshd-session: zuul [priv]"
                 │ └─243600 "sshd-session: zuul@notty"
                 ├─session-62.scope
                 │ ├─243757 "sshd-session: zuul [priv]"
                 │ └─243760 "sshd-session: zuul@notty"
                 ├─session-68.scope
                 │ ├─244541 "sshd-session: zuul [priv]"
                 │ └─244560 "sshd-session: zuul@notty"
                 ├─session-70.scope
                 │ ├─244705 "sshd-session: zuul [priv]"
                 │ └─244708 "sshd-session: zuul@notty"
                 ├─session-71.scope
                 │ ├─244735 "sshd-session: zuul [priv]"
                 │ └─244738 "sshd-session: zuul@notty"
                 ├─session-73.scope
                 │ ├─244839 "sshd-session: zuul [priv]"
                 │ └─244842 "sshd-session: zuul@notty"
                 ├─session-74.scope
                 │ ├─244869 "sshd-session: zuul [priv]"
                 │ └─244872 "sshd-session: zuul@notty"
                 ├─session-76.scope
                 │ ├─244930 "sshd-session: zuul [priv]"
                 │ └─244933 "sshd-session: zuul@notty"
                 ├─session-79.scope
                 │ ├─245397 "sshd-session: zuul [priv]"
                 │ └─245400 "sshd-session: zuul@notty"
                 ├─session-81.scope
                 │ ├─245500 "sshd-session: zuul [priv]"
                 │ └─245503 "sshd-session: zuul@notty"
                 ├─session-82.scope
                 │ ├─245530 "sshd-session: zuul [priv]"
                 │ └─245533 "sshd-session: zuul@notty"
                 ├─session-84.scope
                 │ ├─245589 "sshd-session: zuul [priv]"
                 │ └─245592 "sshd-session: zuul@notty"
                 ├─session-85.scope
                 │ ├─245619 "sshd-session: zuul [priv]"
                 │ └─245634 "sshd-session: zuul@notty"
                 ├─session-87.scope
                 │ ├─245770 "sshd-session: zuul [priv]"
                 │ └─245773 "sshd-session: zuul@notty"
                 ├─session-94.scope
                 │ ├─246551 "sshd-session: zuul [priv]"
                 │ └─246554 "sshd-session: zuul@notty"
                 ├─session-96.scope
                 │ ├─246719 "sshd-session: zuul [priv]"
                 │ └─246722 "sshd-session: zuul@notty"
                 ├─session-97.scope
                 │ ├─246796 "sshd-session: zuul [priv]"
                 │ └─246813 "sshd-session: zuul@notty"
                 ├─session-99.scope
                 │ ├─247006 "sshd-session: zuul [priv]"
                 │ └─247009 "sshd-session: zuul@notty"
                 └─user@1000.service
                   ├─app.slice
                   │ └─dbus-broker.service
                   │   ├─12376 /usr/bin/dbus-broker-launch --scope user
                   │   └─12385 dbus-broker --log 4 --controller 9 --machine-id bf0bc0bb03de29b24cba1cc9599cf5d0 --max-bytes 100000000000000 --max-fds 25000000000000 --max-matches 5000000000
                   ├─init.scope
                   │ ├─4312 /usr/lib/systemd/systemd --user
                   │ └─4314 "(sd-pam)"
                   └─user.slice
                     └─podman-pause-4a554b53.scope
                       └─12312 catatonit -P

Jan 30 20:21:01 compute-1 systemd[1]: proc-sys-fs-binfmt_misc.automount: Got automount request for /proc/sys/fs/binfmt_misc, triggered by 268000 (sos)
Jan 30 20:21:01 compute-1 systemd[1]: Mounting Arbitrary Executable File Formats File System...
Jan 30 20:21:01 compute-1 systemd[1]: Mounted Arbitrary Executable File Formats File System.
Jan 30 20:21:05 compute-1 systemd[1]: Starting Hostname Service...
Jan 30 20:21:05 compute-1 systemd[1]: Started Hostname Service.
Jan 30 20:21:06 compute-1 systemd[1]: Started Session 163 of User zuul.
Jan 30 20:21:06 compute-1 systemd[1]: Started Session 164 of User zuul.
Jan 30 20:21:06 compute-1 systemd[1]: session-163.scope: Deactivated successfully.
Jan 30 20:21:06 compute-1 systemd[1]: session-164.scope: Deactivated successfully.
Jan 30 20:21:14 compute-1 systemd[1]: var-lib-containers-storage-overlay-volatile\x2dcheck1692682708-merged.mount: Deactivated successfully.

● machine.slice - Virtual Machine and Container Slice
     Loaded: loaded (/usr/lib/systemd/system/machine.slice; static)
     Active: active since Fri 2026-01-30 17:34:06 UTC; 2h 47min ago
      Until: Fri 2026-01-30 17:34:06 UTC; 2h 47min ago
       Docs: man:systemd.special(7)
         IO: 65.8G read, 3.6G written
      Tasks: 112
     Memory: 1.3G (peak: 4.7G)
        CPU: 3h 42min 25.315s
     CGroup: /machine.slice
             ├─libpod-407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a.scope
             │ └─container
             │   └─196153 /bin/node_exporter --web.config.file=/etc/node_exporter/node_exporter.yaml --web.disable-exporter-metrics --collector.systemd "--collector.systemd.unit-include=(edpm_.*|ovs.*|openvswitch|virt.*|rsyslog)\\.service" --no-collector.dmi --no-collector.entropy --no-collector.thermal_zone --no-collector.time --no-collector.timex --no-collector.uname --no-collector.stat --no-collector.hwmon --no-collector.os --no-collector.selinux --no-collector.textfile --no-collector.powersupplyclass --no-collector.pressure --no-collector.rapl
             ├─libpod-455dca0f327e5b1da1cd1cb8c0cc19d961216e4814bf06622139b34f7df7859b.scope
             │ └─container
             │   ├─183473 dumb-init --single-child -- kolla_start
             │   ├─183475 /usr/bin/python3 /usr/bin/nova-compute
             │   ├─213354 /usr/bin/python3 /bin/privsep-helper --config-file /etc/nova/nova.conf --config-file /etc/nova/nova-compute.conf --config-dir /etc/nova/nova.conf.d --privsep_context nova.privsep.sys_admin_pctxt --privsep_sock_path /tmp/tmp33az_fjs/privsep.sock
             │   └─213418 /usr/bin/python3 /bin/privsep-helper --config-file /etc/nova/nova.conf --config-file /etc/nova/nova-compute.conf --config-dir /etc/nova/nova.conf.d --privsep_context vif_plug_ovs.privsep.vif_plug --privsep_sock_path /tmp/tmptmqu8bjz/privsep.sock
             ├─libpod-4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd.scope
             │ └─container
             │   ├─193155 dumb-init --single-child -- kolla_start
             │   ├─193158 "ceilometer-polling: master process [/usr/bin/ceilometer-polling --polling-namespaces compute --logfile /dev/stdout]"
             │   └─193336 "ceilometer-polling: AgentManager worker(0)"
             ├─libpod-834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20.scope
             │ └─container
             │   ├─105006 dumb-init --single-child -- kolla_start
             │   ├─105009 "neutron-ovn-metadata-agent (/usr/bin/python3 /usr/bin/neutron-ovn-metadata-agent)"
             │   ├─105396 "neutron-ovn-metadata-agent (/usr/bin/python3 /usr/bin/neutron-ovn-metadata-agent)"
             │   ├─105526 /usr/bin/python3 /bin/privsep-helper --config-file /etc/neutron/neutron.conf --config-dir /etc/neutron.conf.d --privsep_context neutron.privileged.namespace_cmd --privsep_sock_path /tmp/tmph8sg2l6q/privsep.sock
             │   ├─212908 /usr/bin/python3 /bin/privsep-helper --config-file /etc/neutron/neutron.conf --config-dir /etc/neutron.conf.d --privsep_context neutron.privileged.default --privsep_sock_path /tmp/tmpu4b226u9/privsep.sock
             │   └─213561 /usr/bin/python3 /bin/privsep-helper --config-file /etc/neutron/neutron.conf --config-dir /etc/neutron.conf.d --privsep_context neutron.privileged.link_cmd --privsep_sock_path /tmp/tmpg6ekdec0/privsep.sock
             ├─libpod-883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037.scope
             │ └─container
             │   ├─251554 dumb-init --single-child -- kolla_start
             │   └─251557 /usr/bin/ovn-controller --pidfile unix:/run/openvswitch/db.sock -p /etc/pki/tls/private/ovndb.key -c /etc/pki/tls/certs/ovndb.crt -C /etc/pki/tls/certs/ovndbca.crt
             ├─libpod-bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98.scope
             │ └─container
             │   └─199285 /bin/podman_exporter --web.config.file=/etc/podman_exporter/podman_exporter.yaml
             ├─libpod-cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18.scope
             │ └─container
             │   ├─216789 dumb-init --single-child -- /bin/bash -c "exec /usr/sbin/haproxy -Ws -f /var/lib/neutron/ovn-metadata-proxy/20be9363-8d31-4010-8379-2f4db75ec5ee.conf"
             │   ├─216791 /usr/sbin/haproxy -Ws -f /var/lib/neutron/ovn-metadata-proxy/20be9363-8d31-4010-8379-2f4db75ec5ee.conf
             │   └─216793 /usr/sbin/haproxy -Ws -f /var/lib/neutron/ovn-metadata-proxy/20be9363-8d31-4010-8379-2f4db75ec5ee.conf
             ├─libpod-conmon-cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18.scope
             │ └─216787 /usr/bin/conmon --api-version 1 -c cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18 -u cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18/userdata -p /run/containers/storage/overlay-containers/cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18/userdata/pidfile -n neutron-haproxy-ovnmeta-20be9363-8d31-4010-8379-2f4db75ec5ee --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18 --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18/userdata/oci-log --conmon-pidfile /run/containers/storage/overlay-containers/cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg 
overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg cbd13bfbdcc3aa213382af06cde1ca7abdd8b9eb0e9164c91033bad79aa65c18
             ├─libpod-conmon-ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab.scope
             │ └─216861 /usr/bin/conmon --api-version 1 -c ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab -u ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab/userdata -p /run/containers/storage/overlay-containers/ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab/userdata/pidfile -n neutron-haproxy-ovnmeta-311f9b45-49be-4345-9ac8-a1fe5a0b8a53 --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab/userdata/oci-log --conmon-pidfile /run/containers/storage/overlay-containers/ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg 
overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab
             ├─libpod-ddf08844fb44286c4d894019c3ad02e7b2e01eb8e8bd84d3d04d221d6346ebab.scope
             │ └─container
             │   ├─216863 dumb-init --single-child -- /bin/bash -c "exec /usr/sbin/haproxy -Ws -f /var/lib/neutron/ovn-metadata-proxy/311f9b45-49be-4345-9ac8-a1fe5a0b8a53.conf"
             │   ├─216866 /usr/sbin/haproxy -Ws -f /var/lib/neutron/ovn-metadata-proxy/311f9b45-49be-4345-9ac8-a1fe5a0b8a53.conf
             │   └─216868 /usr/sbin/haproxy -Ws -f /var/lib/neutron/ovn-metadata-proxy/311f9b45-49be-4345-9ac8-a1fe5a0b8a53.conf
             ├─libpod-e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a.scope
             │ └─container
             │   └─202410 /app/openstack-network-exporter
             ├─machine-qemu\x2d7\x2dinstance\x2d0000000e.scope
             │ └─libvirt
             │   └─216693 /usr/libexec/qemu-kvm -name guest=instance-0000000e,debug-threads=on -S -object "{\"qom-type\":\"secret\",\"id\":\"masterKey0\",\"format\":\"raw\",\"file\":\"/var/lib/libvirt/qemu/domain-7-instance-0000000e/master-key.aes\"}" -machine pc-q35-rhel9.8.0,usb=off,dump-guest-core=off,memory-backend=pc.ram,hpet=off,acpi=on -accel kvm -cpu EPYC-Rome,x2apic=on,tsc-deadline=on,hypervisor=on,tsc-adjust=on,spec-ctrl=on,stibp=on,ssbd=on,cmp-legacy=on,overflow-recov=on,succor=on,ibrs=on,amd-ssbd=on,virt-ssbd=on,lbrv=on,tsc-scale=on,vmcb-clean=on,flushbyasid=on,pause-filter=on,pfthreshold=on,svme-addr-chk=on,lfence-always-serializing=on,xsaves=off -m size=131072k -object "{\"qom-type\":\"memory-backend-ram\",\"id\":\"pc.ram\",\"size\":134217728}" -overcommit mem-lock=off -smp 1,sockets=1,dies=1,clusters=1,cores=1,threads=1 -uuid 79f0f99a-6f26-49ed-ac15-d149313db321 -smbios "type=1,manufacturer=RDO,product=OpenStack Compute,version=27.5.2-0.20260127144738.eaa65f0.el9,serial=79f0f99a-6f26-49ed-ac15-d149313db321,uuid=79f0f99a-6f26-49ed-ac15-d149313db321,family=Virtual Machine" -no-user-config -nodefaults -chardev socket,id=charmonitor,fd=28,server=on,wait=off -mon chardev=charmonitor,id=monitor,mode=control -rtc base=utc,driftfix=slew -global kvm-pit.lost_tick_policy=delay -no-shutdown -boot strict=on -device "{\"driver\":\"pcie-root-port\",\"port\":16,\"chassis\":1,\"id\":\"pci.1\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":17,\"chassis\":2,\"id\":\"pci.2\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":18,\"chassis\":3,\"id\":\"pci.3\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":19,\"chassis\":4,\"id\":\"pci.4\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":20,\"chassis\":5,\"id\":\"pci.5\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x4\"}" -device 
"{\"driver\":\"pcie-root-port\",\"port\":21,\"chassis\":6,\"id\":\"pci.6\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":22,\"chassis\":7,\"id\":\"pci.7\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":23,\"chassis\":8,\"id\":\"pci.8\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":24,\"chassis\":9,\"id\":\"pci.9\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":25,\"chassis\":10,\"id\":\"pci.10\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":26,\"chassis\":11,\"id\":\"pci.11\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":27,\"chassis\":12,\"id\":\"pci.12\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":28,\"chassis\":13,\"id\":\"pci.13\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":29,\"chassis\":14,\"id\":\"pci.14\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":30,\"chassis\":15,\"id\":\"pci.15\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":31,\"chassis\":16,\"id\":\"pci.16\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":32,\"chassis\":17,\"id\":\"pci.17\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":33,\"chassis\":18,\"id\":\"pci.18\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":34,\"chassis\":19,\"id\":\"pci.19\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":35,\"chassis\":20,\"id\":\"pci.20\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x3\"}" -device 
"{\"driver\":\"pcie-root-port\",\"port\":36,\"chassis\":21,\"id\":\"pci.21\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":37,\"chassis\":22,\"id\":\"pci.22\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":38,\"chassis\":23,\"id\":\"pci.23\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":39,\"chassis\":24,\"id\":\"pci.24\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":40,\"chassis\":25,\"id\":\"pci.25\",\"bus\":\"pcie.0\",\"addr\":\"0x5\"}" -device "{\"driver\":\"pcie-pci-bridge\",\"id\":\"pci.26\",\"bus\":\"pci.1\",\"addr\":\"0x0\"}" -device "{\"driver\":\"piix3-usb-uhci\",\"id\":\"usb\",\"bus\":\"pci.26\",\"addr\":\"0x1\"}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/_base/d8b1339cb6a58d9c630b446503c76b090a4f368e\",\"node-name\":\"libvirt-3-storage\",\"read-only\":true,\"cache\":{\"direct\":true,\"no-flush\":false}}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/79f0f99a-6f26-49ed-ac15-d149313db321/disk\",\"node-name\":\"libvirt-2-storage\",\"auto-read-only\":true,\"discard\":\"unmap\",\"cache\":{\"direct\":true,\"no-flush\":false}}" -blockdev "{\"node-name\":\"libvirt-2-format\",\"read-only\":false,\"cache\":{\"direct\":true,\"no-flush\":false},\"driver\":\"qcow2\",\"file\":\"libvirt-2-storage\",\"backing\":\"libvirt-3-storage\"}" -device "{\"driver\":\"virtio-blk-pci\",\"bus\":\"pci.4\",\"addr\":\"0x0\",\"drive\":\"libvirt-2-format\",\"id\":\"virtio-disk0\",\"bootindex\":1,\"write-cache\":\"on\"}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/79f0f99a-6f26-49ed-ac15-d149313db321/disk.config\",\"node-name\":\"libvirt-1-storage\",\"read-only\":true,\"cache\":{\"direct\":true,\"no-flush\":false}}" -device 
"{\"driver\":\"ide-cd\",\"bus\":\"ide.0\",\"drive\":\"libvirt-1-storage\",\"id\":\"sata0-0-0\",\"write-cache\":\"on\"}" -netdev "{\"type\":\"tap\",\"fd\":\"32\",\"vhost\":true,\"vhostfd\":\"37\",\"id\":\"hostnet0\"}" -device "{\"driver\":\"virtio-net-pci\",\"rx_queue_size\":512,\"host_mtu\":1342,\"netdev\":\"hostnet0\",\"id\":\"net0\",\"mac\":\"fa:16:3e:28:e9:1d\",\"bus\":\"pci.2\",\"addr\":\"0x0\"}" -netdev "{\"type\":\"tap\",\"fd\":\"38\",\"vhost\":true,\"vhostfd\":\"39\",\"id\":\"hostnet1\"}" -device "{\"driver\":\"virtio-net-pci\",\"rx_queue_size\":512,\"host_mtu\":1342,\"netdev\":\"hostnet1\",\"id\":\"net1\",\"mac\":\"fa:16:3e:0b:98:69\",\"bus\":\"pci.3\",\"addr\":\"0x0\"}" -add-fd set=0,fd=30,opaque=serial0-log -chardev pty,id=charserial0,logfile=/dev/fdset/0,logappend=on -device "{\"driver\":\"isa-serial\",\"chardev\":\"charserial0\",\"id\":\"serial0\",\"index\":0}" -device "{\"driver\":\"usb-tablet\",\"id\":\"input0\",\"bus\":\"usb.0\",\"port\":\"1\"}" -audiodev "{\"id\":\"audio1\",\"driver\":\"none\"}" -object "{\"qom-type\":\"tls-creds-x509\",\"id\":\"vnc-tls-creds0\",\"dir\":\"/etc/pki/qemu\",\"endpoint\":\"server\",\"verify-peer\":true}" -vnc "[::0]:0,tls-creds=vnc-tls-creds0,audiodev=audio1" -device "{\"driver\":\"virtio-vga\",\"id\":\"video0\",\"max_outputs\":1,\"bus\":\"pcie.0\",\"addr\":\"0x1\"}" -global ICH9-LPC.noreboot=off -watchdog-action reset -device "{\"driver\":\"virtio-balloon-pci\",\"id\":\"balloon0\",\"bus\":\"pci.5\",\"addr\":\"0x0\"}" -object "{\"qom-type\":\"rng-random\",\"id\":\"objrng0\",\"filename\":\"/dev/urandom\"}" -device "{\"driver\":\"virtio-rng-pci\",\"rng\":\"objrng0\",\"id\":\"rng0\",\"bus\":\"pci.6\",\"addr\":\"0x0\"}" -device "{\"driver\":\"vmcoreinfo\"}" -sandbox on,obsolete=deny,elevateprivileges=deny,spawn=deny,resourcecontrol=deny -msg timestamp=on
             └─machine-qemu\x2d8\x2dinstance\x2d00000010.scope
               └─libvirt
                 └─217311 /usr/libexec/qemu-kvm -name guest=instance-00000010,debug-threads=on -S -object "{\"qom-type\":\"secret\",\"id\":\"masterKey0\",\"format\":\"raw\",\"file\":\"/var/lib/libvirt/qemu/domain-8-instance-00000010/master-key.aes\"}" -machine pc-q35-rhel9.8.0,usb=off,dump-guest-core=off,memory-backend=pc.ram,hpet=off,acpi=on -accel kvm -cpu EPYC-Rome,x2apic=on,tsc-deadline=on,hypervisor=on,tsc-adjust=on,spec-ctrl=on,stibp=on,ssbd=on,cmp-legacy=on,overflow-recov=on,succor=on,ibrs=on,amd-ssbd=on,virt-ssbd=on,lbrv=on,tsc-scale=on,vmcb-clean=on,flushbyasid=on,pause-filter=on,pfthreshold=on,svme-addr-chk=on,lfence-always-serializing=on,xsaves=off -m size=131072k -object "{\"qom-type\":\"memory-backend-ram\",\"id\":\"pc.ram\",\"size\":134217728}" -overcommit mem-lock=off -smp 1,sockets=1,dies=1,clusters=1,cores=1,threads=1 -uuid bdd94295-7a8b-44b3-91c8-846d36f784a0 -smbios "type=1,manufacturer=RDO,product=OpenStack Compute,version=27.5.2-0.20260127144738.eaa65f0.el9,serial=bdd94295-7a8b-44b3-91c8-846d36f784a0,uuid=bdd94295-7a8b-44b3-91c8-846d36f784a0,family=Virtual Machine" -no-user-config -nodefaults -chardev socket,id=charmonitor,fd=34,server=on,wait=off -mon chardev=charmonitor,id=monitor,mode=control -rtc base=utc,driftfix=slew -global kvm-pit.lost_tick_policy=delay -no-shutdown -boot strict=on -device "{\"driver\":\"pcie-root-port\",\"port\":16,\"chassis\":1,\"id\":\"pci.1\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":17,\"chassis\":2,\"id\":\"pci.2\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":18,\"chassis\":3,\"id\":\"pci.3\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":19,\"chassis\":4,\"id\":\"pci.4\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":20,\"chassis\":5,\"id\":\"pci.5\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x4\"}" -device 
"{\"driver\":\"pcie-root-port\",\"port\":21,\"chassis\":6,\"id\":\"pci.6\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":22,\"chassis\":7,\"id\":\"pci.7\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":23,\"chassis\":8,\"id\":\"pci.8\",\"bus\":\"pcie.0\",\"addr\":\"0x2.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":24,\"chassis\":9,\"id\":\"pci.9\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":25,\"chassis\":10,\"id\":\"pci.10\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":26,\"chassis\":11,\"id\":\"pci.11\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":27,\"chassis\":12,\"id\":\"pci.12\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x3\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":28,\"chassis\":13,\"id\":\"pci.13\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":29,\"chassis\":14,\"id\":\"pci.14\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":30,\"chassis\":15,\"id\":\"pci.15\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":31,\"chassis\":16,\"id\":\"pci.16\",\"bus\":\"pcie.0\",\"addr\":\"0x3.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":32,\"chassis\":17,\"id\":\"pci.17\",\"bus\":\"pcie.0\",\"multifunction\":true,\"addr\":\"0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":33,\"chassis\":18,\"id\":\"pci.18\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x1\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":34,\"chassis\":19,\"id\":\"pci.19\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x2\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":35,\"chassis\":20,\"id\":\"pci.20\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x3\"}" -device 
"{\"driver\":\"pcie-root-port\",\"port\":36,\"chassis\":21,\"id\":\"pci.21\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x4\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":37,\"chassis\":22,\"id\":\"pci.22\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x5\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":38,\"chassis\":23,\"id\":\"pci.23\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x6\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":39,\"chassis\":24,\"id\":\"pci.24\",\"bus\":\"pcie.0\",\"addr\":\"0x4.0x7\"}" -device "{\"driver\":\"pcie-root-port\",\"port\":40,\"chassis\":25,\"id\":\"pci.25\",\"bus\":\"pcie.0\",\"addr\":\"0x5\"}" -device "{\"driver\":\"pcie-pci-bridge\",\"id\":\"pci.26\",\"bus\":\"pci.1\",\"addr\":\"0x0\"}" -device "{\"driver\":\"piix3-usb-uhci\",\"id\":\"usb\",\"bus\":\"pci.26\",\"addr\":\"0x1\"}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/_base/d8b1339cb6a58d9c630b446503c76b090a4f368e\",\"node-name\":\"libvirt-3-storage\",\"read-only\":true,\"cache\":{\"direct\":true,\"no-flush\":false}}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/bdd94295-7a8b-44b3-91c8-846d36f784a0/disk\",\"node-name\":\"libvirt-2-storage\",\"auto-read-only\":true,\"discard\":\"unmap\",\"cache\":{\"direct\":true,\"no-flush\":false}}" -blockdev "{\"node-name\":\"libvirt-2-format\",\"read-only\":false,\"cache\":{\"direct\":true,\"no-flush\":false},\"driver\":\"qcow2\",\"file\":\"libvirt-2-storage\",\"backing\":\"libvirt-3-storage\"}" -device "{\"driver\":\"virtio-blk-pci\",\"bus\":\"pci.4\",\"addr\":\"0x0\",\"drive\":\"libvirt-2-format\",\"id\":\"virtio-disk0\",\"bootindex\":1,\"write-cache\":\"on\"}" -blockdev "{\"driver\":\"file\",\"filename\":\"/var/lib/nova/instances/bdd94295-7a8b-44b3-91c8-846d36f784a0/disk.config\",\"node-name\":\"libvirt-1-storage\",\"read-only\":true,\"cache\":{\"direct\":true,\"no-flush\":false}}" -device 
"{\"driver\":\"ide-cd\",\"bus\":\"ide.0\",\"drive\":\"libvirt-1-storage\",\"id\":\"sata0-0-0\",\"write-cache\":\"on\"}" -netdev "{\"type\":\"tap\",\"fd\":\"39\",\"vhost\":true,\"vhostfd\":\"41\",\"id\":\"hostnet0\"}" -device "{\"driver\":\"virtio-net-pci\",\"rx_queue_size\":512,\"host_mtu\":1342,\"netdev\":\"hostnet0\",\"id\":\"net0\",\"mac\":\"fa:16:3e:16:12:2b\",\"bus\":\"pci.2\",\"addr\":\"0x0\"}" -netdev "{\"type\":\"tap\",\"fd\":\"42\",\"vhost\":true,\"vhostfd\":\"47\",\"id\":\"hostnet1\"}" -device "{\"driver\":\"virtio-net-pci\",\"rx_queue_size\":512,\"host_mtu\":1342,\"netdev\":\"hostnet1\",\"id\":\"net1\",\"mac\":\"fa:16:3e:9a:66:89\",\"bus\":\"pci.3\",\"addr\":\"0x0\"}" -add-fd set=0,fd=38,opaque=serial0-log -chardev pty,id=charserial0,logfile=/dev/fdset/0,logappend=on -device "{\"driver\":\"isa-serial\",\"chardev\":\"charserial0\",\"id\":\"serial0\",\"index\":0}" -device "{\"driver\":\"usb-tablet\",\"id\":\"input0\",\"bus\":\"usb.0\",\"port\":\"1\"}" -audiodev "{\"id\":\"audio1\",\"driver\":\"none\"}" -object "{\"qom-type\":\"tls-creds-x509\",\"id\":\"vnc-tls-creds0\",\"dir\":\"/etc/pki/qemu\",\"endpoint\":\"server\",\"verify-peer\":true}" -vnc "[::0]:1,tls-creds=vnc-tls-creds0,audiodev=audio1" -device "{\"driver\":\"virtio-vga\",\"id\":\"video0\",\"max_outputs\":1,\"bus\":\"pcie.0\",\"addr\":\"0x1\"}" -global ICH9-LPC.noreboot=off -watchdog-action reset -device "{\"driver\":\"virtio-balloon-pci\",\"id\":\"balloon0\",\"bus\":\"pci.5\",\"addr\":\"0x0\"}" -object "{\"qom-type\":\"rng-random\",\"id\":\"objrng0\",\"filename\":\"/dev/urandom\"}" -device "{\"driver\":\"virtio-rng-pci\",\"rng\":\"objrng0\",\"id\":\"rng0\",\"bus\":\"pci.6\",\"addr\":\"0x0\"}" -device "{\"driver\":\"vmcoreinfo\"}" -sandbox on,obsolete=deny,elevateprivileges=deny,spawn=deny,resourcecontrol=deny -msg timestamp=on

Jan 30 20:19:56 compute-1 neutron-haproxy-ovnmeta-b785f2d9-25d0-4cca-b23e-d2683fa2d149[267574]: [NOTICE]   (267578) : Loading success.
Jan 30 20:20:21 compute-1 neutron-haproxy-ovnmeta-b785f2d9-25d0-4cca-b23e-d2683fa2d149[267574]: [NOTICE]   (267578) : haproxy version is 2.8.14-c23fe91
Jan 30 20:20:21 compute-1 neutron-haproxy-ovnmeta-b785f2d9-25d0-4cca-b23e-d2683fa2d149[267574]: [NOTICE]   (267578) : path to executable is /usr/sbin/haproxy
Jan 30 20:20:21 compute-1 neutron-haproxy-ovnmeta-b785f2d9-25d0-4cca-b23e-d2683fa2d149[267574]: [WARNING]  (267578) : Exiting Master process...
Jan 30 20:20:21 compute-1 neutron-haproxy-ovnmeta-b785f2d9-25d0-4cca-b23e-d2683fa2d149[267574]: [WARNING]  (267578) : Exiting Master process...
Jan 30 20:20:21 compute-1 neutron-haproxy-ovnmeta-b785f2d9-25d0-4cca-b23e-d2683fa2d149[267574]: [ALERT]    (267578) : Current worker (267580) exited with code 143 (Terminated)
Jan 30 20:20:21 compute-1 neutron-haproxy-ovnmeta-b785f2d9-25d0-4cca-b23e-d2683fa2d149[267574]: [WARNING]  (267578) : All workers exited. Exiting... (0)
Jan 30 20:20:21 compute-1 podman[267696]: 2026-01-30 20:20:21.57347368 +0000 UTC m=+0.061943454 container died 1716d37968352c0203286fc8a43824cdeea3ac700ae1587b961e853a638cb90c (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-b785f2d9-25d0-4cca-b23e-d2683fa2d149, maintainer=OpenStack Kubernetes Operator team, tcib_build_tag=b85d0548925081ae8c6bdd697658cec4, org.label-schema.license=GPLv2, io.buildah.version=1.41.3, org.label-schema.build-date=20260127, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true)
Jan 30 20:20:21 compute-1 podman[267696]: 2026-01-30 20:20:21.621727486 +0000 UTC m=+0.110197250 container cleanup 1716d37968352c0203286fc8a43824cdeea3ac700ae1587b961e853a638cb90c (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-b785f2d9-25d0-4cca-b23e-d2683fa2d149, io.buildah.version=1.41.3, maintainer=OpenStack Kubernetes Operator team, org.label-schema.build-date=20260127, org.label-schema.schema-version=1.0, tcib_build_tag=b85d0548925081ae8c6bdd697658cec4, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, tcib_managed=true)
Jan 30 20:20:21 compute-1 podman[267741]: 2026-01-30 20:20:21.701020962 +0000 UTC m=+0.056977825 container remove 1716d37968352c0203286fc8a43824cdeea3ac700ae1587b961e853a638cb90c (image=quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified, name=neutron-haproxy-ovnmeta-b785f2d9-25d0-4cca-b23e-d2683fa2d149, maintainer=OpenStack Kubernetes Operator team, org.label-schema.schema-version=1.0, io.buildah.version=1.41.3, tcib_build_tag=b85d0548925081ae8c6bdd697658cec4, tcib_managed=true, org.label-schema.build-date=20260127, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS)

● system-dbus\x2d:1.1\x2dorg.fedoraproject.SetroubleshootPrivileged.slice - Slice /system/dbus-:1.1-org.fedoraproject.SetroubleshootPrivileged
     Loaded: loaded
     Active: active since Fri 2026-01-30 17:39:32 UTC; 2h 41min ago
      Until: Fri 2026-01-30 17:39:32 UTC; 2h 41min ago
         IO: 4.0K read, 0B written
      Tasks: 0
     Memory: 8.0K (peak: 58.7M)
        CPU: 929ms
     CGroup: /system.slice/system-dbus\x2d:1.1\x2dorg.fedoraproject.SetroubleshootPrivileged.slice

Jan 30 17:39:32 compute-1 systemd[1]: Created slice Slice /system/dbus-:1.1-org.fedoraproject.SetroubleshootPrivileged.

● system-getty.slice - Slice /system/getty
     Loaded: loaded
     Active: active since Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
         IO: 0B read, 0B written
      Tasks: 1
     Memory: 296.0K (peak: 776.0K)
        CPU: 8ms
     CGroup: /system.slice/system-getty.slice
             └─getty@tty1.service
               └─1011 /sbin/agetty -o "-p -- \\u" --noclear - linux

● system-modprobe.slice - Slice /system/modprobe
     Loaded: loaded
     Active: active since Fri 2026-01-30 16:48:32 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:32 UTC; 3h 32min ago
         IO: 0B read, 0B written
      Tasks: 0
     Memory: 412.0K (peak: 11.4M)
        CPU: 125ms
     CGroup: /system.slice/system-modprobe.slice

Jan 30 16:48:32 localhost systemd[1]: Created slice Slice /system/modprobe.

● system-serial\x2dgetty.slice - Slice /system/serial-getty
     Loaded: loaded
     Active: active since Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
         IO: 0B read, 0B written
      Tasks: 1
     Memory: 248.0K (peak: 504.0K)
        CPU: 7ms
     CGroup: /system.slice/system-serial\x2dgetty.slice
             └─serial-getty@ttyS0.service
               └─1012 /sbin/agetty -o "-p -- \\u" --keep-baud 115200,57600,38400,9600 - vt220

● system-sshd\x2dkeygen.slice - Slice /system/sshd-keygen
     Loaded: loaded
     Active: active since Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
         IO: 0B read, 0B written
      Tasks: 0
     Memory: 0B (peak: 0B)
        CPU: 0
     CGroup: /system.slice/system-sshd\x2dkeygen.slice

● system.slice - System Slice
     Loaded: loaded
     Active: active since Fri 2026-01-30 16:48:31 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:31 UTC; 3h 32min ago
       Docs: man:systemd.special(7)
         IO: 74.4M read, 383.9M written
      Tasks: 129
     Memory: 953.0M (peak: 1.2G)
        CPU: 28min 8.507s
     CGroup: /system.slice
             ├─NetworkManager.service
             │ └─55963 /usr/sbin/NetworkManager --no-daemon
             ├─auditd.service
             │ ├─704 /sbin/auditd
             │ └─706 /usr/sbin/sedispatch
             ├─chronyd.service
             │ └─65304 /usr/sbin/chronyd -F 2
             ├─crond.service
             │ └─1010 /usr/sbin/crond -n
             ├─dbus-broker.service
             │ ├─765 /usr/bin/dbus-broker-launch --scope system --audit
             │ └─776 dbus-broker --log 4 --controller 9 --machine-id bf0bc0bb03de29b24cba1cc9599cf5d0 --max-bytes 536870912 --max-fds 4096 --max-matches 131072 --audit
             ├─edpm_ceilometer_agent_compute.service
             │ └─193153 /usr/bin/conmon --api-version 1 -c 4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd -u 4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd/userdata -p /run/containers/storage/overlay-containers/4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd/userdata/pidfile -n ceilometer_agent_compute --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd/userdata/oci-log --conmon-pidfile /run/ceilometer_agent_compute.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup 
--exit-command-arg --stopped-only --exit-command-arg 4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd
             ├─edpm_node_exporter.service
             │ └─196151 /usr/bin/conmon --api-version 1 -c 407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a -u 407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a/userdata -p /run/containers/storage/overlay-containers/407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a/userdata/pidfile -n node_exporter --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a/userdata/oci-log --conmon-pidfile /run/node_exporter.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 
--stopped-only --exit-command-arg 407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a
             ├─edpm_nova_compute.service
             │ └─183471 /usr/bin/conmon --api-version 1 -c 455dca0f327e5b1da1cd1cb8c0cc19d961216e4814bf06622139b34f7df7859b -u 455dca0f327e5b1da1cd1cb8c0cc19d961216e4814bf06622139b34f7df7859b -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/455dca0f327e5b1da1cd1cb8c0cc19d961216e4814bf06622139b34f7df7859b/userdata -p /run/containers/storage/overlay-containers/455dca0f327e5b1da1cd1cb8c0cc19d961216e4814bf06622139b34f7df7859b/userdata/pidfile -n nova_compute --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/455dca0f327e5b1da1cd1cb8c0cc19d961216e4814bf06622139b34f7df7859b --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/455dca0f327e5b1da1cd1cb8c0cc19d961216e4814bf06622139b34f7df7859b/userdata/oci-log --conmon-pidfile /run/nova_compute.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 
--stopped-only --exit-command-arg 455dca0f327e5b1da1cd1cb8c0cc19d961216e4814bf06622139b34f7df7859b
             ├─edpm_openstack_network_exporter.service
             │ └─202408 /usr/bin/conmon --api-version 1 -c e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a -u e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a/userdata -p /run/containers/storage/overlay-containers/e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a/userdata/pidfile -n openstack_network_exporter --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a/userdata/oci-log --conmon-pidfile /run/openstack_network_exporter.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup 
--exit-command-arg --stopped-only --exit-command-arg e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a
             ├─edpm_ovn_controller.service
             │ └─251552 /usr/bin/conmon --api-version 1 -c 883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037 -u 883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037/userdata -p /run/containers/storage/overlay-containers/883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037/userdata/pidfile -n ovn_controller --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037 --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037/userdata/oci-log --conmon-pidfile /run/ovn_controller.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 
--stopped-only --exit-command-arg 883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037
             ├─edpm_ovn_metadata_agent.service
             │ └─105004 /usr/bin/conmon --api-version 1 -c 834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20 -u 834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20/userdata -p /run/containers/storage/overlay-containers/834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20/userdata/pidfile -n ovn_metadata_agent --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20 --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20/userdata/oci-log --conmon-pidfile /run/ovn_metadata_agent.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup 
--exit-command-arg --stopped-only --exit-command-arg 834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20
             ├─edpm_podman_exporter.service
             │ └─199283 /usr/bin/conmon --api-version 1 -c bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98 -u bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98/userdata -p /run/containers/storage/overlay-containers/bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98/userdata/pidfile -n podman_exporter --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98 --full-attach -s -l journald --log-level warning --syslog --runtime-arg --log-format=json --runtime-arg --log --runtime-arg=/run/containers/storage/overlay-containers/bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98/userdata/oci-log --conmon-pidfile /run/podman_exporter.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg warning --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg "" --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --hooks-dir --exit-command-arg /usr/share/containers/oci/hooks.d --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg container --exit-command-arg cleanup --exit-command-arg 
--stopped-only --exit-command-arg bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98
             ├─gssproxy.service
             │ └─879 /usr/sbin/gssproxy -D
             ├─irqbalance.service
             │ └─790 /usr/sbin/irqbalance
             ├─iscsid.service
             │ └─169133 /usr/sbin/iscsid -f
             ├─multipathd.service
             │ └─169292 /sbin/multipathd -d -s
             ├─ovs-vswitchd.service
             │ └─54264 ovs-vswitchd unix:/var/run/openvswitch/db.sock -vconsole:emer -vsyslog:err -vfile:info --mlockall --user openvswitch:hugetlbfs --no-chdir --log-file=/var/log/openvswitch/ovs-vswitchd.log --pidfile=/var/run/openvswitch/ovs-vswitchd.pid --detach
             ├─ovsdb-server.service
             │ └─54182 ovsdb-server /etc/openvswitch/conf.db -vconsole:emer -vsyslog:err -vfile:info --remote=punix:/var/run/openvswitch/db.sock --private-key=db:Open_vSwitch,SSL,private_key --certificate=db:Open_vSwitch,SSL,certificate --bootstrap-ca-cert=db:Open_vSwitch,SSL,ca_cert --user openvswitch:hugetlbfs --no-chdir --log-file=/var/log/openvswitch/ovsdb-server.log --pidfile=/var/run/openvswitch/ovsdb-server.pid --detach
             ├─podman.service
             │ └─199294 /usr/bin/podman --log-level=info system service
             ├─polkit.service
             │ └─44106 /usr/lib/polkit-1/polkitd --no-debug
             ├─rpcbind.service
             │ └─702 /usr/bin/rpcbind -w -f
             ├─rsyslog.service
             │ └─1006 /usr/sbin/rsyslogd -n
             ├─sshd.service
             │ └─129855 "sshd: /usr/sbin/sshd -D [listener] 0 of 10-100 startups"
             ├─system-getty.slice
             │ └─getty@tty1.service
             │   └─1011 /sbin/agetty -o "-p -- \\u" --noclear - linux
             ├─system-serial\x2dgetty.slice
             │ └─serial-getty@ttyS0.service
             │   └─1012 /sbin/agetty -o "-p -- \\u" --keep-baud 115200,57600,38400,9600 - vt220
             ├─systemd-hostnamed.service
             │ └─268676 /usr/lib/systemd/systemd-hostnamed
             ├─systemd-journald.service
             │ └─679 /usr/lib/systemd/systemd-journald
             ├─systemd-logind.service
             │ └─808 /usr/lib/systemd/systemd-logind
             ├─systemd-machined.service
             │ └─154781 /usr/lib/systemd/systemd-machined
             ├─systemd-udevd.service
             │ └─udev
             │   └─732 /usr/lib/systemd/systemd-udevd
             ├─tuned.service
             │ └─44283 /usr/bin/python3 -Es /usr/sbin/tuned -l -P
             ├─virtlogd.service
             │ └─154150 /usr/sbin/virtlogd
             ├─virtnodedevd.service
             │ └─183389 /usr/sbin/virtnodedevd --timeout 120
             └─virtqemud.service
               └─183098 /usr/sbin/virtqemud --timeout 120

Jan 30 20:21:23 compute-1 ceilometer_agent_compute[193153]: 2026-01-30 20:21:23.660 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 381, in ensure_connection
Jan 30 20:21:23 compute-1 ceilometer_agent_compute[193153]: 2026-01-30 20:21:23.660 12 ERROR oslo_messaging.notify.messaging     self._ensure_connection(*args, **kwargs)
Jan 30 20:21:23 compute-1 ceilometer_agent_compute[193153]: 2026-01-30 20:21:23.660 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 433, in _ensure_connection
Jan 30 20:21:23 compute-1 ceilometer_agent_compute[193153]: 2026-01-30 20:21:23.660 12 ERROR oslo_messaging.notify.messaging     return retry_over_time(
Jan 30 20:21:23 compute-1 ceilometer_agent_compute[193153]: 2026-01-30 20:21:23.660 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib64/python3.9/contextlib.py", line 137, in __exit__
Jan 30 20:21:23 compute-1 ceilometer_agent_compute[193153]: 2026-01-30 20:21:23.660 12 ERROR oslo_messaging.notify.messaging     self.gen.throw(typ, value, traceback)
Jan 30 20:21:23 compute-1 ceilometer_agent_compute[193153]: 2026-01-30 20:21:23.660 12 ERROR oslo_messaging.notify.messaging   File "/usr/lib/python3.9/site-packages/kombu/connection.py", line 450, in _reraise_as_library_errors
Jan 30 20:21:23 compute-1 ceilometer_agent_compute[193153]: 2026-01-30 20:21:23.660 12 ERROR oslo_messaging.notify.messaging     raise ConnectionError(str(exc)) from exc
Jan 30 20:21:23 compute-1 ceilometer_agent_compute[193153]: 2026-01-30 20:21:23.660 12 ERROR oslo_messaging.notify.messaging kombu.exceptions.OperationalError: [Errno 111] Connection refused
Jan 30 20:21:23 compute-1 ceilometer_agent_compute[193153]: 2026-01-30 20:21:23.660 12 ERROR oslo_messaging.notify.messaging 

● user-1000.slice - User Slice of UID 1000
     Loaded: loaded
    Drop-In: /usr/lib/systemd/system/user-.slice.d
             └─10-defaults.conf
     Active: active since Fri 2026-01-30 16:50:03 UTC; 3h 31min ago
      Until: Fri 2026-01-30 16:50:03 UTC; 3h 31min ago
       Docs: man:user@.service(5)
         IO: 278.6M read, 6.4G written
      Tasks: 109 (limit: 41288)
     Memory: 5.2G (peak: 5.3G)
        CPU: 15min 54.946s
     CGroup: /user.slice/user-1000.slice
             ├─session-1.scope
             │ └─4523 /usr/bin/python3
             ├─session-100.scope
             │ ├─247036 "sshd-session: zuul [priv]"
             │ └─247039 "sshd-session: zuul@notty"
             ├─session-102.scope
             │ ├─247152 "sshd-session: zuul [priv]"
             │ └─247155 "sshd-session: zuul@notty"
             ├─session-105.scope
             │ ├─247407 "sshd-session: zuul [priv]"
             │ └─247410 "sshd-session: zuul@notty"
             ├─session-107.scope
             │ ├─247511 "sshd-session: zuul [priv]"
             │ └─247514 "sshd-session: zuul@notty"
             ├─session-108.scope
             │ ├─247824 "sshd-session: zuul [priv]"
             │ └─247827 "sshd-session: zuul@notty"
             ├─session-110.scope
             │ ├─247944 "sshd-session: zuul [priv]"
             │ └─247977 "sshd-session: zuul@notty"
             ├─session-111.scope
             │ ├─248004 "sshd-session: zuul [priv]"
             │ └─248007 "sshd-session: zuul@notty"
             ├─session-113.scope
             │ ├─248105 "sshd-session: zuul [priv]"
             │ └─248108 "sshd-session: zuul@notty"
             ├─session-120.scope
             │ ├─248982 "sshd-session: zuul [priv]"
             │ └─248985 "sshd-session: zuul@notty"
             ├─session-122.scope
             │ ├─249152 "sshd-session: zuul [priv]"
             │ └─249196 "sshd-session: zuul@notty"
             ├─session-124.scope
             │ ├─249529 "sshd-session: zuul [priv]"
             │ └─249532 "sshd-session: zuul@notty"
             ├─session-126.scope
             │ ├─249678 "sshd-session: zuul [priv]"
             │ └─249681 "sshd-session: zuul@notty"
             ├─session-128.scope
             │ ├─249968 "sshd-session: zuul [priv]"
             │ └─249971 "sshd-session: zuul@notty"
             ├─session-130.scope
             │ ├─250070 "sshd-session: zuul [priv]"
             │ └─250073 "sshd-session: zuul@notty"
             ├─session-131.scope
             │ ├─250101 "sshd-session: zuul [priv]"
             │ └─250104 "sshd-session: zuul@notty"
             ├─session-133.scope
             │ ├─250202 "sshd-session: zuul [priv]"
             │ └─250205 "sshd-session: zuul@notty"
             ├─session-134.scope
             │ ├─250290 "sshd-session: zuul [priv]"
             │ └─250312 "sshd-session: zuul@notty"
             ├─session-136.scope
             │ ├─250434 "sshd-session: zuul [priv]"
             │ └─250449 "sshd-session: zuul@notty"
             ├─session-162.scope
             │ ├─267972 "sshd-session: zuul [priv]"
             │ ├─267975 "sshd-session: zuul@notty"
             │ ├─267976 sudo bash -c "rm -rf /var/tmp/sos-osp && mkdir /var/tmp/sos-osp && sos report --batch --all-logs --tmp-dir=/var/tmp/sos-osp  -p container,openstack_edpm,system,storage,virt"
             │ ├─268000 /usr/bin/python3 -s /sbin/sos report --batch --all-logs --tmp-dir=/var/tmp/sos-osp -p container,openstack_edpm,system,storage,virt
             │ ├─271208 timeout 15s turbostat --debug sleep 10
             │ ├─272225 timeout 300s systemctl status --all
             │ ├─272226 systemctl status --all
             │ ├─272358 timeout --foreground 300s virsh -r nodedev-dumpxml usb_1_0_1_0
             │ └─272359 virsh -r nodedev-dumpxml usb_1_0_1_0
             ├─session-48.scope
             │ ├─242639 "sshd-session: zuul [priv]"
             │ └─242642 "sshd-session: zuul@notty"
             ├─session-50.scope
             │ ├─242795 "sshd-session: zuul [priv]"
             │ └─242798 "sshd-session: zuul@notty"
             ├─session-51.scope
             │ ├─242870 "sshd-session: zuul [priv]"
             │ └─242873 "sshd-session: zuul@notty"
             ├─session-53.scope
             │ ├─242971 "sshd-session: zuul [priv]"
             │ └─242974 "sshd-session: zuul@notty"
             ├─session-54.scope
             │ ├─243049 "sshd-session: zuul [priv]"
             │ └─243052 "sshd-session: zuul@notty"
             ├─session-56.scope
             │ ├─243121 "sshd-session: zuul [priv]"
             │ └─243131 "sshd-session: zuul@notty"
             ├─session-57.scope
             │ ├─243214 "sshd-session: zuul [priv]"
             │ └─243217 "sshd-session: zuul@notty"
             ├─session-59.scope
             │ ├─243317 "sshd-session: zuul [priv]"
             │ └─243320 "sshd-session: zuul@notty"
             ├─session-60.scope
             │ ├─243597 "sshd-session: zuul [priv]"
             │ └─243600 "sshd-session: zuul@notty"
             ├─session-62.scope
             │ ├─243757 "sshd-session: zuul [priv]"
             │ └─243760 "sshd-session: zuul@notty"
             ├─session-68.scope
             │ ├─244541 "sshd-session: zuul [priv]"
             │ └─244560 "sshd-session: zuul@notty"
             ├─session-70.scope
             │ ├─244705 "sshd-session: zuul [priv]"
             │ └─244708 "sshd-session: zuul@notty"
             ├─session-71.scope
             │ ├─244735 "sshd-session: zuul [priv]"
             │ └─244738 "sshd-session: zuul@notty"
             ├─session-73.scope
             │ ├─244839 "sshd-session: zuul [priv]"
             │ └─244842 "sshd-session: zuul@notty"
             ├─session-74.scope
             │ ├─244869 "sshd-session: zuul [priv]"
             │ └─244872 "sshd-session: zuul@notty"
             ├─session-76.scope
             │ ├─244930 "sshd-session: zuul [priv]"
             │ └─244933 "sshd-session: zuul@notty"
             ├─session-79.scope
             │ ├─245397 "sshd-session: zuul [priv]"
             │ └─245400 "sshd-session: zuul@notty"
             ├─session-81.scope
             │ ├─245500 "sshd-session: zuul [priv]"
             │ └─245503 "sshd-session: zuul@notty"
             ├─session-82.scope
             │ ├─245530 "sshd-session: zuul [priv]"
             │ └─245533 "sshd-session: zuul@notty"
             ├─session-84.scope
             │ ├─245589 "sshd-session: zuul [priv]"
             │ └─245592 "sshd-session: zuul@notty"
             ├─session-85.scope
             │ ├─245619 "sshd-session: zuul [priv]"
             │ └─245634 "sshd-session: zuul@notty"
             ├─session-87.scope
             │ ├─245770 "sshd-session: zuul [priv]"
             │ └─245773 "sshd-session: zuul@notty"
             ├─session-94.scope
             │ ├─246551 "sshd-session: zuul [priv]"
             │ └─246554 "sshd-session: zuul@notty"
             ├─session-96.scope
             │ ├─246719 "sshd-session: zuul [priv]"
             │ └─246722 "sshd-session: zuul@notty"
             ├─session-97.scope
             │ ├─246796 "sshd-session: zuul [priv]"
             │ └─246813 "sshd-session: zuul@notty"
             ├─session-99.scope
             │ ├─247006 "sshd-session: zuul [priv]"
             │ └─247009 "sshd-session: zuul@notty"
             └─user@1000.service
               ├─app.slice
               │ └─dbus-broker.service
               │   ├─12376 /usr/bin/dbus-broker-launch --scope user
               │   └─12385 dbus-broker --log 4 --controller 9 --machine-id bf0bc0bb03de29b24cba1cc9599cf5d0 --max-bytes 100000000000000 --max-fds 25000000000000 --max-matches 5000000000
               ├─init.scope
               │ ├─4312 /usr/lib/systemd/systemd --user
               │ └─4314 "(sd-pam)"
               └─user.slice
                 └─podman-pause-4a554b53.scope
                   └─12312 catatonit -P

Jan 30 20:15:55 compute-1 sshd-session[265945]: Connection closed by 38.102.83.246 port 48538
Jan 30 20:16:32 compute-1 sshd-session[266128]: Connection closed by 38.102.83.246 port 50282
Jan 30 20:16:33 compute-1 sshd-session[266155]: Connection closed by 38.102.83.246 port 50284
Jan 30 20:18:46 compute-1 sshd-session[267044]: Connection closed by 38.102.83.246 port 33452
Jan 30 20:18:47 compute-1 sshd-session[267071]: Connection closed by 38.102.83.246 port 33466
Jan 30 20:20:56 compute-1 sudo[267976]:     zuul : PWD=/home/zuul ; USER=root ; COMMAND=/bin/bash -c 'rm -rf /var/tmp/sos-osp && mkdir /var/tmp/sos-osp && sos report --batch --all-logs --tmp-dir=/var/tmp/sos-osp  -p container,openstack_edpm,system,storage,virt'
Jan 30 20:20:56 compute-1 sudo[267976]: pam_unix(sudo:session): session opened for user root(uid=0) by zuul(uid=1000)
Jan 30 20:21:06 compute-1 sshd-session[268749]: Connection closed by 38.102.83.246 port 49192
Jan 30 20:21:06 compute-1 sshd-session[268821]: Connection closed by 38.102.83.246 port 49194
Jan 30 20:21:14 compute-1 ovs-appctl[270189]: ovs|00001|daemon_unix|WARN|/var/run/openvswitch/ovs-monitor-ipsec.pid: open: No such file or directory

● user.slice - User and Session Slice
     Loaded: loaded (/usr/lib/systemd/system/user.slice; static)
     Active: active since Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
       Docs: man:systemd.special(7)
         IO: 278.6M read, 6.4G written
      Tasks: 107
     Memory: 5.2G (peak: 5.3G)
        CPU: 15min 55.960s
     CGroup: /user.slice
             └─user-1000.slice
               ├─session-1.scope
               │ └─4523 /usr/bin/python3
               ├─session-100.scope
               │ ├─247036 "sshd-session: zuul [priv]"
               │ └─247039 "sshd-session: zuul@notty"
               ├─session-102.scope
               │ ├─247152 "sshd-session: zuul [priv]"
               │ └─247155 "sshd-session: zuul@notty"
               ├─session-105.scope
               │ ├─247407 "sshd-session: zuul [priv]"
               │ └─247410 "sshd-session: zuul@notty"
               ├─session-107.scope
               │ ├─247511 "sshd-session: zuul [priv]"
               │ └─247514 "sshd-session: zuul@notty"
               ├─session-108.scope
               │ ├─247824 "sshd-session: zuul [priv]"
               │ └─247827 "sshd-session: zuul@notty"
               ├─session-110.scope
               │ ├─247944 "sshd-session: zuul [priv]"
               │ └─247977 "sshd-session: zuul@notty"
               ├─session-111.scope
               │ ├─248004 "sshd-session: zuul [priv]"
               │ └─248007 "sshd-session: zuul@notty"
               ├─session-113.scope
               │ ├─248105 "sshd-session: zuul [priv]"
               │ └─248108 "sshd-session: zuul@notty"
               ├─session-120.scope
               │ ├─248982 "sshd-session: zuul [priv]"
               │ └─248985 "sshd-session: zuul@notty"
               ├─session-122.scope
               │ ├─249152 "sshd-session: zuul [priv]"
               │ └─249196 "sshd-session: zuul@notty"
               ├─session-124.scope
               │ ├─249529 "sshd-session: zuul [priv]"
               │ └─249532 "sshd-session: zuul@notty"
               ├─session-126.scope
               │ ├─249678 "sshd-session: zuul [priv]"
               │ └─249681 "sshd-session: zuul@notty"
               ├─session-128.scope
               │ ├─249968 "sshd-session: zuul [priv]"
               │ └─249971 "sshd-session: zuul@notty"
               ├─session-130.scope
               │ ├─250070 "sshd-session: zuul [priv]"
               │ └─250073 "sshd-session: zuul@notty"
               ├─session-131.scope
               │ ├─250101 "sshd-session: zuul [priv]"
               │ └─250104 "sshd-session: zuul@notty"
               ├─session-133.scope
               │ ├─250202 "sshd-session: zuul [priv]"
               │ └─250205 "sshd-session: zuul@notty"
               ├─session-134.scope
               │ ├─250290 "sshd-session: zuul [priv]"
               │ └─250312 "sshd-session: zuul@notty"
               ├─session-136.scope
               │ ├─250434 "sshd-session: zuul [priv]"
               │ └─250449 "sshd-session: zuul@notty"
               ├─session-162.scope
               │ ├─267972 "sshd-session: zuul [priv]"
               │ ├─267975 "sshd-session: zuul@notty"
               │ ├─267976 sudo bash -c "rm -rf /var/tmp/sos-osp && mkdir /var/tmp/sos-osp && sos report --batch --all-logs --tmp-dir=/var/tmp/sos-osp  -p container,openstack_edpm,system,storage,virt"
               │ ├─268000 /usr/bin/python3 -s /sbin/sos report --batch --all-logs --tmp-dir=/var/tmp/sos-osp -p container,openstack_edpm,system,storage,virt
               │ ├─271208 timeout 15s turbostat --debug sleep 10
               │ ├─272225 timeout 300s systemctl status --all
               │ ├─272226 systemctl status --all
               │ └─272361 "[sos]"
               ├─session-48.scope
               │ ├─242639 "sshd-session: zuul [priv]"
               │ └─242642 "sshd-session: zuul@notty"
               ├─session-50.scope
               │ ├─242795 "sshd-session: zuul [priv]"
               │ └─242798 "sshd-session: zuul@notty"
               ├─session-51.scope
               │ ├─242870 "sshd-session: zuul [priv]"
               │ └─242873 "sshd-session: zuul@notty"
               ├─session-53.scope
               │ ├─242971 "sshd-session: zuul [priv]"
               │ └─242974 "sshd-session: zuul@notty"
               ├─session-54.scope
               │ ├─243049 "sshd-session: zuul [priv]"
               │ └─243052 "sshd-session: zuul@notty"
               ├─session-56.scope
               │ ├─243121 "sshd-session: zuul [priv]"
               │ └─243131 "sshd-session: zuul@notty"
               ├─session-57.scope
               │ ├─243214 "sshd-session: zuul [priv]"
               │ └─243217 "sshd-session: zuul@notty"
               ├─session-59.scope
               │ ├─243317 "sshd-session: zuul [priv]"
               │ └─243320 "sshd-session: zuul@notty"
               ├─session-60.scope
               │ ├─243597 "sshd-session: zuul [priv]"
               │ └─243600 "sshd-session: zuul@notty"
               ├─session-62.scope
               │ ├─243757 "sshd-session: zuul [priv]"
               │ └─243760 "sshd-session: zuul@notty"
               ├─session-68.scope
               │ ├─244541 "sshd-session: zuul [priv]"
               │ └─244560 "sshd-session: zuul@notty"
               ├─session-70.scope
               │ ├─244705 "sshd-session: zuul [priv]"
               │ └─244708 "sshd-session: zuul@notty"
               ├─session-71.scope
               │ ├─244735 "sshd-session: zuul [priv]"
               │ └─244738 "sshd-session: zuul@notty"
               ├─session-73.scope
               │ ├─244839 "sshd-session: zuul [priv]"
               │ └─244842 "sshd-session: zuul@notty"
               ├─session-74.scope
               │ ├─244869 "sshd-session: zuul [priv]"
               │ └─244872 "sshd-session: zuul@notty"
               ├─session-76.scope
               │ ├─244930 "sshd-session: zuul [priv]"
               │ └─244933 "sshd-session: zuul@notty"
               ├─session-79.scope
               │ ├─245397 "sshd-session: zuul [priv]"
               │ └─245400 "sshd-session: zuul@notty"
               ├─session-81.scope
               │ ├─245500 "sshd-session: zuul [priv]"
               │ └─245503 "sshd-session: zuul@notty"
               ├─session-82.scope
               │ ├─245530 "sshd-session: zuul [priv]"
               │ └─245533 "sshd-session: zuul@notty"
               ├─session-84.scope
               │ ├─245589 "sshd-session: zuul [priv]"
               │ └─245592 "sshd-session: zuul@notty"
               ├─session-85.scope
               │ ├─245619 "sshd-session: zuul [priv]"
               │ └─245634 "sshd-session: zuul@notty"
               ├─session-87.scope
               │ ├─245770 "sshd-session: zuul [priv]"
               │ └─245773 "sshd-session: zuul@notty"
               ├─session-94.scope
               │ ├─246551 "sshd-session: zuul [priv]"
               │ └─246554 "sshd-session: zuul@notty"
               ├─session-96.scope
               │ ├─246719 "sshd-session: zuul [priv]"
               │ └─246722 "sshd-session: zuul@notty"
               ├─session-97.scope
               │ ├─246796 "sshd-session: zuul [priv]"
               │ └─246813 "sshd-session: zuul@notty"
               ├─session-99.scope
               │ ├─247006 "sshd-session: zuul [priv]"
               │ └─247009 "sshd-session: zuul@notty"
               └─user@1000.service
                 ├─app.slice
                 │ └─dbus-broker.service
                 │   ├─12376 /usr/bin/dbus-broker-launch --scope user
                 │   └─12385 dbus-broker --log 4 --controller 9 --machine-id bf0bc0bb03de29b24cba1cc9599cf5d0 --max-bytes 100000000000000 --max-fds 25000000000000 --max-matches 5000000000
                 ├─init.scope
                 │ ├─4312 /usr/lib/systemd/systemd --user
                 │ └─4314 "(sd-pam)"
                 └─user.slice
                   └─podman-pause-4a554b53.scope
                     └─12312 catatonit -P

● dbus.socket - D-Bus System Message Bus Socket
     Loaded: loaded (/usr/lib/systemd/system/dbus.socket; enabled; preset: enabled)
     Active: active (running) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
   Triggers: ● dbus-broker.service
     Listen: /run/dbus/system_bus_socket (Stream)
     CGroup: /system.slice/dbus.socket

Jan 30 16:48:36 localhost systemd[1]: Listening on D-Bus System Message Bus Socket.

● dm-event.socket - Device-mapper event daemon FIFOs
     Loaded: loaded (/usr/lib/systemd/system/dm-event.socket; enabled; preset: enabled)
     Active: active (listening) since Fri 2026-01-30 17:24:26 UTC; 2h 56min ago
      Until: Fri 2026-01-30 17:24:26 UTC; 2h 56min ago
   Triggers: ● dm-event.service
       Docs: man:dmeventd(8)
     Listen: /run/dmeventd-server (FIFO)
             /run/dmeventd-client (FIFO)
     CGroup: /system.slice/dm-event.socket

Jan 30 17:24:26 compute-1 systemd[1]: Listening on Device-mapper event daemon FIFOs.

● iscsid.socket - Open-iSCSI iscsid Socket
     Loaded: loaded (/usr/lib/systemd/system/iscsid.socket; enabled; preset: enabled)
     Active: active (running) since Fri 2026-01-30 17:40:31 UTC; 2h 40min ago
      Until: Fri 2026-01-30 17:40:31 UTC; 2h 40min ago
   Triggers: ● iscsid.service
       Docs: man:iscsid(8)
             man:iscsiadm(8)
     Listen: @ISCSIADM_ABSTRACT_NAMESPACE (Stream)
     CGroup: /system.slice/iscsid.socket

Jan 30 17:40:31 compute-1 systemd[1]: Listening on Open-iSCSI iscsid Socket.

○ iscsiuio.socket - Open-iSCSI iscsiuio Socket
     Loaded: loaded (/usr/lib/systemd/system/iscsiuio.socket; enabled; preset: enabled)
     Active: inactive (dead)
   Triggers: ● iscsiuio.service
       Docs: man:iscsiuio(8)
     Listen: @ISCSID_UIP_ABSTRACT_NAMESPACE (Stream)

○ libvirtd-admin.socket - libvirt legacy monolithic daemon admin socket
     Loaded: loaded (/usr/lib/systemd/system/libvirtd-admin.socket; disabled; preset: disabled)
     Active: inactive (dead)
   Triggers: ● libvirtd.service
     Listen: /run/libvirt/libvirt-admin-sock (Stream)

○ libvirtd-ro.socket - libvirt legacy monolithic daemon read-only socket
     Loaded: loaded (/usr/lib/systemd/system/libvirtd-ro.socket; disabled; preset: disabled)
     Active: inactive (dead)
   Triggers: ● libvirtd.service
     Listen: /run/libvirt/libvirt-sock-ro (Stream)

○ libvirtd-tls.socket
     Loaded: masked (Reason: Unit libvirtd-tls.socket is masked.)
     Active: inactive (dead)

○ libvirtd.socket - libvirt legacy monolithic daemon socket
     Loaded: loaded (/usr/lib/systemd/system/libvirtd.socket; disabled; preset: disabled)
     Active: inactive (dead)
   Triggers: ● libvirtd.service
     Listen: /run/libvirt/libvirt-sock (Stream)

● lvm2-lvmpolld.socket - LVM2 poll daemon socket
     Loaded: loaded (/usr/lib/systemd/system/lvm2-lvmpolld.socket; enabled; preset: enabled)
     Active: active (listening) since Fri 2026-01-30 17:24:27 UTC; 2h 56min ago
      Until: Fri 2026-01-30 17:24:27 UTC; 2h 56min ago
   Triggers: ● lvm2-lvmpolld.service
       Docs: man:lvmpolld(8)
     Listen: /run/lvm/lvmpolld.socket (Stream)
     CGroup: /system.slice/lvm2-lvmpolld.socket

Jan 30 17:24:27 compute-1 systemd[1]: Listening on LVM2 poll daemon socket.

● multipathd.socket - multipathd control socket
     Loaded: loaded (/usr/lib/systemd/system/multipathd.socket; enabled; preset: disabled)
     Active: active (running) since Fri 2026-01-30 17:40:56 UTC; 2h 40min ago
      Until: Fri 2026-01-30 17:40:56 UTC; 2h 40min ago
   Triggers: ● multipathd.service
     Listen: @/org/kernel/linux/storage/multipathd (Stream)
             /run/multipathd.socket (Stream)
     CGroup: /system.slice/multipathd.socket

Jan 30 17:40:56 compute-1 systemd[1]: Listening on multipathd control socket.

● podman.socket - Podman API Socket
     Loaded: loaded (/usr/lib/systemd/system/podman.socket; enabled; preset: disabled)
     Active: active (running) since Fri 2026-01-30 17:43:06 UTC; 2h 38min ago
      Until: Fri 2026-01-30 17:43:06 UTC; 2h 38min ago
   Triggers: ● podman.service
       Docs: man:podman-system-service(1)
     Listen: /run/podman/podman.sock (Stream)
     CGroup: /system.slice/podman.socket

Jan 30 17:43:06 compute-1 systemd[1]: Listening on Podman API Socket.

● rpcbind.socket - RPCbind Server Activation Socket
     Loaded: loaded (/usr/lib/systemd/system/rpcbind.socket; enabled; preset: enabled)
     Active: active (running) since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
   Triggers: ● rpcbind.service
     Listen: /run/rpcbind.sock (Stream)
             0.0.0.0:111 (Stream)
             0.0.0.0:111 (Datagram)
             [::]:111 (Stream)
             [::]:111 (Datagram)
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 16.0K (peak: 288.0K)
        CPU: 2ms
     CGroup: /system.slice/rpcbind.socket

● sssd-kcm.socket - SSSD Kerberos Cache Manager responder socket
     Loaded: loaded (/usr/lib/systemd/system/sssd-kcm.socket; enabled; preset: enabled)
     Active: active (listening) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
   Triggers: ● sssd-kcm.service
       Docs: man:sssd-kcm(8)
     Listen: /run/.heim_org.h5l.kcm-socket (Stream)
     CGroup: /system.slice/sssd-kcm.socket

Jan 30 16:48:36 localhost systemd[1]: Listening on SSSD Kerberos Cache Manager responder socket.

○ syslog.socket - Syslog Socket
     Loaded: loaded (/usr/lib/systemd/system/syslog.socket; static)
     Active: inactive (dead)
   Triggers: ● syslog.service
       Docs: man:systemd.special(7)
             https://www.freedesktop.org/wiki/Software/systemd/syslog
     Listen: /run/systemd/journal/syslog (Datagram)

● systemd-coredump.socket - Process Core Dump Socket
     Loaded: loaded (/usr/lib/systemd/system/systemd-coredump.socket; static)
     Active: active (listening) since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
       Docs: man:systemd-coredump(8)
     Listen: /run/systemd/coredump (SequentialPacket)
   Accepted: 0; Connected: 0;
     CGroup: /system.slice/systemd-coredump.socket

● systemd-initctl.socket - initctl Compatibility Named Pipe
     Loaded: loaded (/usr/lib/systemd/system/systemd-initctl.socket; static)
     Active: active (listening) since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
   Triggers: ● systemd-initctl.service
       Docs: man:systemd-initctl.socket(8)
     Listen: /run/initctl (FIFO)
     CGroup: /system.slice/systemd-initctl.socket

● systemd-journald-dev-log.socket - Journal Socket (/dev/log)
     Loaded: loaded (/usr/lib/systemd/system/systemd-journald-dev-log.socket; static)
     Active: active (running) since Fri 2026-01-30 16:48:31 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:31 UTC; 3h 32min ago
   Triggers: ● systemd-journald.service
       Docs: man:systemd-journald.service(8)
             man:journald.conf(5)
     Listen: /run/systemd/journal/dev-log (Datagram)
     CGroup: /system.slice/systemd-journald-dev-log.socket

Notice: journal has been rotated since unit was started, output may be incomplete.

● systemd-journald.socket - Journal Socket
     Loaded: loaded (/usr/lib/systemd/system/systemd-journald.socket; static)
     Active: active (running) since Fri 2026-01-30 16:48:31 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:31 UTC; 3h 32min ago
   Triggers: ● systemd-journald.service
       Docs: man:systemd-journald.service(8)
             man:journald.conf(5)
     Listen: /run/systemd/journal/socket (Datagram)
             /run/systemd/journal/stdout (Stream)
     CGroup: /system.slice/systemd-journald.socket

Notice: journal has been rotated since unit was started, output may be incomplete.

● systemd-rfkill.socket - Load/Save RF Kill Switch Status /dev/rfkill Watch
     Loaded: loaded (/usr/lib/systemd/system/systemd-rfkill.socket; static)
     Active: active (listening) since Fri 2026-01-30 16:48:37 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:37 UTC; 3h 32min ago
   Triggers: ● systemd-rfkill.service
       Docs: man:systemd-rfkill.socket(8)
     Listen: /dev/rfkill (Special)
     CGroup: /system.slice/systemd-rfkill.socket

Jan 30 16:48:37 np0005602931.novalocal systemd[1]: Listening on Load/Save RF Kill Switch Status /dev/rfkill Watch.

● systemd-udevd-control.socket - udev Control Socket
     Loaded: loaded (/usr/lib/systemd/system/systemd-udevd-control.socket; static)
     Active: active (running) since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
   Triggers: ● systemd-udevd.service
       Docs: man:systemd-udevd-control.socket(8)
             man:udev(7)
     Listen: /run/udev/control (SequentialPacket)
     CGroup: /system.slice/systemd-udevd-control.socket

● systemd-udevd-kernel.socket - udev Kernel Socket
     Loaded: loaded (/usr/lib/systemd/system/systemd-udevd-kernel.socket; static)
     Active: active (running) since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
   Triggers: ● systemd-udevd.service
       Docs: man:systemd-udevd-kernel.socket(8)
             man:udev(7)
     Listen: kobject-uevent 1 (Netlink)
     CGroup: /system.slice/systemd-udevd-kernel.socket

○ virtinterfaced-admin.socket - libvirt interface daemon admin socket
     Loaded: loaded (/usr/lib/systemd/system/virtinterfaced-admin.socket; enabled; preset: enabled)
     Active: inactive (dead)
   Triggers: ● virtinterfaced.service
     Listen: /run/libvirt/virtinterfaced-admin-sock (Stream)

○ virtinterfaced-ro.socket - libvirt interface daemon read-only socket
     Loaded: loaded (/usr/lib/systemd/system/virtinterfaced-ro.socket; enabled; preset: enabled)
     Active: inactive (dead)
   Triggers: ● virtinterfaced.service
     Listen: /run/libvirt/virtinterfaced-sock-ro (Stream)

○ virtinterfaced.socket - libvirt interface daemon socket
     Loaded: loaded (/usr/lib/systemd/system/virtinterfaced.socket; enabled; preset: enabled)
     Active: inactive (dead)
   Triggers: ● virtinterfaced.service
     Listen: /run/libvirt/virtinterfaced-sock (Stream)

○ virtlockd-admin.socket - libvirt locking daemon admin socket
     Loaded: loaded (/usr/lib/systemd/system/virtlockd-admin.socket; enabled; preset: enabled)
     Active: inactive (dead)
   Triggers: ● virtlockd.service
     Listen: /run/libvirt/virtlockd-admin-sock (Stream)

● virtlockd.socket - libvirt locking daemon socket
     Loaded: loaded (/usr/lib/systemd/system/virtlockd.socket; enabled; preset: enabled)
     Active: active (listening) since Fri 2026-01-30 17:39:34 UTC; 2h 41min ago
      Until: Fri 2026-01-30 17:39:34 UTC; 2h 41min ago
   Triggers: ● virtlockd.service
     Listen: /run/libvirt/virtlockd-sock (Stream)
     CGroup: /system.slice/virtlockd.socket

Jan 30 17:39:34 compute-1 systemd[1]: Listening on libvirt locking daemon socket.

● virtlogd-admin.socket - libvirt logging daemon admin socket
     Loaded: loaded (/usr/lib/systemd/system/virtlogd-admin.socket; enabled; preset: enabled)
    Drop-In: /etc/systemd/system/virtlogd-admin.socket.d
             └─override.conf
     Active: active (running) since Fri 2026-01-30 17:39:30 UTC; 2h 41min ago
      Until: Fri 2026-01-30 17:39:30 UTC; 2h 41min ago
   Triggers: ● virtlogd.service
     Listen: /run/libvirt/virtlogd-admin-sock (Stream)
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 4.0K (peak: 512.0K)
        CPU: 1ms
     CGroup: /system.slice/virtlogd-admin.socket

Jan 30 17:39:30 compute-1 systemd[1]: Starting libvirt logging daemon admin socket...
Jan 30 17:39:30 compute-1 systemd[1]: Listening on libvirt logging daemon admin socket.

● virtlogd.socket - libvirt logging daemon socket
     Loaded: loaded (/usr/lib/systemd/system/virtlogd.socket; enabled; preset: enabled)
    Drop-In: /etc/systemd/system/virtlogd.socket.d
             └─override.conf
     Active: active (running) since Fri 2026-01-30 17:39:30 UTC; 2h 41min ago
      Until: Fri 2026-01-30 17:39:30 UTC; 2h 41min ago
   Triggers: ● virtlogd.service
     Listen: /run/libvirt/virtlogd-sock (Stream)
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 0B (peak: 512.0K)
        CPU: 2ms
     CGroup: /system.slice/virtlogd.socket

Jan 30 17:39:30 compute-1 systemd[1]: Starting libvirt logging daemon socket...
Jan 30 17:39:30 compute-1 systemd[1]: Listening on libvirt logging daemon socket.

○ virtnetworkd-admin.socket - libvirt network daemon admin socket
     Loaded: loaded (/usr/lib/systemd/system/virtnetworkd-admin.socket; enabled; preset: enabled)
     Active: inactive (dead)
   Triggers: ● virtnetworkd.service
     Listen: /run/libvirt/virtnetworkd-admin-sock (Stream)

○ virtnetworkd-ro.socket - libvirt network daemon read-only socket
     Loaded: loaded (/usr/lib/systemd/system/virtnetworkd-ro.socket; enabled; preset: enabled)
     Active: inactive (dead)
   Triggers: ● virtnetworkd.service
     Listen: /run/libvirt/virtnetworkd-sock-ro (Stream)

○ virtnetworkd.socket - libvirt network daemon socket
     Loaded: loaded (/usr/lib/systemd/system/virtnetworkd.socket; enabled; preset: enabled)
     Active: inactive (dead)
   Triggers: ● virtnetworkd.service
     Listen: /run/libvirt/virtnetworkd-sock (Stream)

● virtnodedevd-admin.socket - libvirt nodedev daemon admin socket
     Loaded: loaded (/usr/lib/systemd/system/virtnodedevd-admin.socket; enabled; preset: enabled)
    Drop-In: /etc/systemd/system/virtnodedevd-admin.socket.d
             └─override.conf
     Active: active (running) since Fri 2026-01-30 17:39:31 UTC; 2h 41min ago
      Until: Fri 2026-01-30 17:39:31 UTC; 2h 41min ago
   Triggers: ● virtnodedevd.service
     Listen: /run/libvirt/virtnodedevd-admin-sock (Stream)
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 0B (peak: 512.0K)
        CPU: 2ms
     CGroup: /system.slice/virtnodedevd-admin.socket

Jan 30 17:39:31 compute-1 systemd[1]: Starting libvirt nodedev daemon admin socket...
Jan 30 17:39:31 compute-1 systemd[1]: Listening on libvirt nodedev daemon admin socket.

● virtnodedevd-ro.socket - libvirt nodedev daemon read-only socket
     Loaded: loaded (/usr/lib/systemd/system/virtnodedevd-ro.socket; enabled; preset: enabled)
    Drop-In: /etc/systemd/system/virtnodedevd-ro.socket.d
             └─override.conf
     Active: active (running) since Fri 2026-01-30 17:39:31 UTC; 2h 41min ago
      Until: Fri 2026-01-30 17:39:31 UTC; 2h 41min ago
   Triggers: ● virtnodedevd.service
     Listen: /run/libvirt/virtnodedevd-sock-ro (Stream)
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 0B (peak: 512.0K)
        CPU: 2ms
     CGroup: /system.slice/virtnodedevd-ro.socket

Jan 30 17:39:31 compute-1 systemd[1]: Starting libvirt nodedev daemon read-only socket...
Jan 30 17:39:31 compute-1 systemd[1]: Listening on libvirt nodedev daemon read-only socket.

● virtnodedevd.socket - libvirt nodedev daemon socket
     Loaded: loaded (/usr/lib/systemd/system/virtnodedevd.socket; enabled; preset: enabled)
    Drop-In: /etc/systemd/system/virtnodedevd.socket.d
             └─override.conf
     Active: active (running) since Fri 2026-01-30 17:39:31 UTC; 2h 41min ago
      Until: Fri 2026-01-30 17:39:31 UTC; 2h 41min ago
   Triggers: ● virtnodedevd.service
     Listen: /run/libvirt/virtnodedevd-sock (Stream)
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 0B (peak: 512.0K)
        CPU: 2ms
     CGroup: /system.slice/virtnodedevd.socket

Jan 30 17:39:31 compute-1 systemd[1]: Starting libvirt nodedev daemon socket...
Jan 30 17:39:31 compute-1 systemd[1]: Listening on libvirt nodedev daemon socket.

○ virtnwfilterd-admin.socket - libvirt nwfilter daemon admin socket
     Loaded: loaded (/usr/lib/systemd/system/virtnwfilterd-admin.socket; enabled; preset: enabled)
     Active: inactive (dead)
   Triggers: ● virtnwfilterd.service
     Listen: /run/libvirt/virtnwfilterd-admin-sock (Stream)

○ virtnwfilterd-ro.socket - libvirt nwfilter daemon read-only socket
     Loaded: loaded (/usr/lib/systemd/system/virtnwfilterd-ro.socket; enabled; preset: enabled)
     Active: inactive (dead)
   Triggers: ● virtnwfilterd.service
     Listen: /run/libvirt/virtnwfilterd-sock-ro (Stream)

○ virtnwfilterd.socket - libvirt nwfilter daemon socket
     Loaded: loaded (/usr/lib/systemd/system/virtnwfilterd.socket; enabled; preset: enabled)
     Active: inactive (dead)
   Triggers: ● virtnwfilterd.service
     Listen: /run/libvirt/virtnwfilterd-sock (Stream)

● virtproxyd-admin.socket - libvirt proxy daemon admin socket
     Loaded: loaded (/usr/lib/systemd/system/virtproxyd-admin.socket; enabled; preset: enabled)
    Drop-In: /etc/systemd/system/virtproxyd-admin.socket.d
             └─override.conf
     Active: active (listening) since Fri 2026-01-30 17:39:32 UTC; 2h 41min ago
      Until: Fri 2026-01-30 17:39:32 UTC; 2h 41min ago
   Triggers: ● virtproxyd.service
     Listen: /run/libvirt/libvirt-admin-sock (Stream)
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 0B (peak: 436.0K)
        CPU: 2ms
     CGroup: /system.slice/virtproxyd-admin.socket

Jan 30 17:39:32 compute-1 systemd[1]: Starting libvirt proxy daemon admin socket...
Jan 30 17:39:32 compute-1 systemd[1]: Listening on libvirt proxy daemon admin socket.

● virtproxyd-ro.socket - libvirt proxy daemon read-only socket
     Loaded: loaded (/usr/lib/systemd/system/virtproxyd-ro.socket; enabled; preset: enabled)
    Drop-In: /etc/systemd/system/virtproxyd-ro.socket.d
             └─override.conf
     Active: active (listening) since Fri 2026-01-30 17:39:32 UTC; 2h 41min ago
      Until: Fri 2026-01-30 17:39:32 UTC; 2h 41min ago
   Triggers: ● virtproxyd.service
     Listen: /run/libvirt/libvirt-sock-ro (Stream)
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 4.0K (peak: 512.0K)
        CPU: 2ms
     CGroup: /system.slice/virtproxyd-ro.socket

Jan 30 17:39:32 compute-1 systemd[1]: Starting libvirt proxy daemon read-only socket...
Jan 30 17:39:32 compute-1 systemd[1]: Listening on libvirt proxy daemon read-only socket.

● virtproxyd-tls.socket - libvirt proxy daemon TLS IP socket
     Loaded: loaded (/usr/lib/systemd/system/virtproxyd-tls.socket; enabled; preset: disabled)
     Active: active (listening) since Fri 2026-01-30 17:38:29 UTC; 2h 42min ago
      Until: Fri 2026-01-30 17:38:29 UTC; 2h 42min ago
   Triggers: ● virtproxyd.service
     Listen: [::]:16514 (Stream)
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 8.0K (peak: 256.0K)
        CPU: 1ms
     CGroup: /system.slice/virtproxyd-tls.socket

Jan 30 17:38:29 compute-1 systemd[1]: Listening on libvirt proxy daemon TLS IP socket.

● virtproxyd.socket - libvirt proxy daemon socket
     Loaded: loaded (/usr/lib/systemd/system/virtproxyd.socket; enabled; preset: enabled)
    Drop-In: /etc/systemd/system/virtproxyd.socket.d
             └─override.conf
     Active: active (listening) since Fri 2026-01-30 17:38:29 UTC; 2h 42min ago
      Until: Fri 2026-01-30 17:38:29 UTC; 2h 42min ago
   Triggers: ● virtproxyd.service
     Listen: /run/libvirt/libvirt-sock (Stream)
     CGroup: /system.slice/virtproxyd.socket

Jan 30 17:38:29 compute-1 systemd[1]: Listening on libvirt proxy daemon socket.

● virtqemud-admin.socket - libvirt QEMU daemon admin socket
     Loaded: loaded (/usr/lib/systemd/system/virtqemud-admin.socket; enabled; preset: enabled)
    Drop-In: /etc/systemd/system/virtqemud-admin.socket.d
             └─override.conf
     Active: active (running) since Fri 2026-01-30 17:39:34 UTC; 2h 41min ago
      Until: Fri 2026-01-30 17:39:34 UTC; 2h 41min ago
   Triggers: ● virtqemud.service
     Listen: /run/libvirt/virtqemud-admin-sock (Stream)
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 0B (peak: 512.0K)
        CPU: 2ms
     CGroup: /system.slice/virtqemud-admin.socket

Jan 30 17:39:34 compute-1 systemd[1]: Starting libvirt QEMU daemon admin socket...
Jan 30 17:39:34 compute-1 systemd[1]: Listening on libvirt QEMU daemon admin socket.

● virtqemud-ro.socket - libvirt QEMU daemon read-only socket
     Loaded: loaded (/usr/lib/systemd/system/virtqemud-ro.socket; enabled; preset: enabled)
    Drop-In: /etc/systemd/system/virtqemud-ro.socket.d
             └─override.conf
     Active: active (running) since Fri 2026-01-30 17:39:34 UTC; 2h 41min ago
      Until: Fri 2026-01-30 17:39:34 UTC; 2h 41min ago
   Triggers: ● virtqemud.service
     Listen: /run/libvirt/virtqemud-sock-ro (Stream)
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 0B (peak: 364.0K)
        CPU: 2ms
     CGroup: /system.slice/virtqemud-ro.socket

Jan 30 17:39:34 compute-1 systemd[1]: Starting libvirt QEMU daemon read-only socket...
Jan 30 17:39:34 compute-1 systemd[1]: Listening on libvirt QEMU daemon read-only socket.

● virtqemud.socket - libvirt QEMU daemon socket
     Loaded: loaded (/usr/lib/systemd/system/virtqemud.socket; enabled; preset: enabled)
    Drop-In: /etc/systemd/system/virtqemud.socket.d
             └─override.conf
     Active: active (running) since Fri 2026-01-30 17:39:34 UTC; 2h 41min ago
      Until: Fri 2026-01-30 17:39:34 UTC; 2h 41min ago
   Triggers: ● virtqemud.service
     Listen: /run/libvirt/virtqemud-sock (Stream)
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 0B (peak: 512.0K)
        CPU: 3ms
     CGroup: /system.slice/virtqemud.socket

Jan 30 17:39:34 compute-1 systemd[1]: Starting libvirt QEMU daemon socket...
Jan 30 17:39:34 compute-1 systemd[1]: Listening on libvirt QEMU daemon socket.

● virtsecretd-admin.socket - libvirt secret daemon admin socket
     Loaded: loaded (/usr/lib/systemd/system/virtsecretd-admin.socket; enabled; preset: enabled)
    Drop-In: /etc/systemd/system/virtsecretd-admin.socket.d
             └─override.conf
     Active: active (listening) since Fri 2026-01-30 17:39:35 UTC; 2h 41min ago
      Until: Fri 2026-01-30 17:39:35 UTC; 2h 41min ago
   Triggers: ● virtsecretd.service
     Listen: /run/libvirt/virtsecretd-admin-sock (Stream)
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 0B (peak: 500.0K)
        CPU: 2ms
     CGroup: /system.slice/virtsecretd-admin.socket

Jan 30 17:39:35 compute-1 systemd[1]: Starting libvirt secret daemon admin socket...
Jan 30 17:39:35 compute-1 systemd[1]: Listening on libvirt secret daemon admin socket.

● virtsecretd-ro.socket - libvirt secret daemon read-only socket
     Loaded: loaded (/usr/lib/systemd/system/virtsecretd-ro.socket; enabled; preset: enabled)
    Drop-In: /etc/systemd/system/virtsecretd-ro.socket.d
             └─override.conf
     Active: active (listening) since Fri 2026-01-30 17:39:35 UTC; 2h 41min ago
      Until: Fri 2026-01-30 17:39:35 UTC; 2h 41min ago
   Triggers: ● virtsecretd.service
     Listen: /run/libvirt/virtsecretd-sock-ro (Stream)
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 0B (peak: 512.0K)
        CPU: 2ms
     CGroup: /system.slice/virtsecretd-ro.socket

Jan 30 17:39:35 compute-1 systemd[1]: Starting libvirt secret daemon read-only socket...
Jan 30 17:39:35 compute-1 systemd[1]: Listening on libvirt secret daemon read-only socket.

● virtsecretd.socket - libvirt secret daemon socket
     Loaded: loaded (/usr/lib/systemd/system/virtsecretd.socket; enabled; preset: enabled)
    Drop-In: /etc/systemd/system/virtsecretd.socket.d
             └─override.conf
     Active: active (listening) since Fri 2026-01-30 17:39:35 UTC; 2h 41min ago
      Until: Fri 2026-01-30 17:39:35 UTC; 2h 41min ago
   Triggers: ● virtsecretd.service
     Listen: /run/libvirt/virtsecretd-sock (Stream)
         IO: 0B read, 0B written
      Tasks: 0 (limit: 100092)
     Memory: 4.0K (peak: 616.0K)
        CPU: 2ms
     CGroup: /system.slice/virtsecretd.socket

Jan 30 17:39:35 compute-1 systemd[1]: Starting libvirt secret daemon socket...
Jan 30 17:39:35 compute-1 systemd[1]: Listening on libvirt secret daemon socket.

○ virtstoraged-admin.socket - libvirt storage daemon admin socket
     Loaded: loaded (/usr/lib/systemd/system/virtstoraged-admin.socket; enabled; preset: enabled)
     Active: inactive (dead)
   Triggers: ● virtstoraged.service
     Listen: /run/libvirt/virtstoraged-admin-sock (Stream)

○ virtstoraged-ro.socket - libvirt storage daemon read-only socket
     Loaded: loaded (/usr/lib/systemd/system/virtstoraged-ro.socket; enabled; preset: enabled)
     Active: inactive (dead)
   Triggers: ● virtstoraged.service
     Listen: /run/libvirt/virtstoraged-sock-ro (Stream)

○ virtstoraged.socket - libvirt storage daemon socket
     Loaded: loaded (/usr/lib/systemd/system/virtstoraged.socket; enabled; preset: enabled)
Unit ceph.target could not be found.
     Active: inactive (dead)
   Triggers: ● virtstoraged.service
     Listen: /run/libvirt/virtstoraged-sock (Stream)

● swap.swap - /swap
     Loaded: loaded (/etc/fstab; generated)
     Active: active since Fri 2026-01-30 17:26:29 UTC; 2h 54min ago
      Until: Fri 2026-01-30 17:26:29 UTC; 2h 54min ago
       What: /swap
       Docs: man:fstab(5)
             man:systemd-fstab-generator(8)

● basic.target - Basic System
     Loaded: loaded (/usr/lib/systemd/system/basic.target; static)
     Active: active since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
       Docs: man:systemd.special(7)

Jan 30 16:48:36 localhost systemd[1]: Reached target Basic System.

○ blockdev@dev-disk-by\x2duuid-822f14ea\x2d6e7e\x2d41df\x2db0d8\x2dfbe282d9ded8.target - Block Device Preparation for /dev/disk/by-uuid/822f14ea-6e7e-41df-b0d8-fbe282d9ded8
     Loaded: loaded (/usr/lib/systemd/system/blockdev@.target; static)
     Active: inactive (dead)
       Docs: man:systemd.special(7)

○ blockdev@dev-vda1.target - Block Device Preparation for /dev/vda1
     Loaded: loaded (/usr/lib/systemd/system/blockdev@.target; static)
     Active: inactive (dead)
       Docs: man:systemd.special(7)

● cloud-config.target - Cloud-config availability
     Loaded: loaded (/usr/lib/systemd/system/cloud-config.target; static)
     Active: active since Fri 2026-01-30 16:48:43 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:43 UTC; 3h 32min ago

Jan 30 16:48:43 np0005602931.novalocal systemd[1]: Reached target Cloud-config availability.

● cloud-init.target - Cloud-init target
     Loaded: loaded (/usr/lib/systemd/system/cloud-init.target; enabled-runtime; preset: disabled)
     Active: active since Fri 2026-01-30 16:48:44 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:44 UTC; 3h 32min ago

Jan 30 16:48:44 np0005602931.novalocal systemd[1]: Reached target Cloud-init target.

○ cryptsetup-pre.target - Local Encrypted Volumes (Pre)
     Loaded: loaded (/usr/lib/systemd/system/cryptsetup-pre.target; static)
     Active: inactive (dead)
       Docs: man:systemd.special(7)

● cryptsetup.target - Local Encrypted Volumes
     Loaded: loaded (/usr/lib/systemd/system/cryptsetup.target; static)
     Active: active since Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
       Docs: man:systemd.special(7)

● edpm_libvirt.target
     Loaded: loaded (/etc/systemd/system/edpm_libvirt.target; static)
     Active: active since Fri 2026-01-30 17:40:00 UTC; 2h 41min ago
      Until: Fri 2026-01-30 17:40:00 UTC; 2h 41min ago

Jan 30 17:40:00 compute-1 systemd[1]: Reached target edpm_libvirt.target.

○ emergency.target - Emergency Mode
     Loaded: loaded (/usr/lib/systemd/system/emergency.target; static)
     Active: inactive (dead)
       Docs: man:systemd.special(7)

○ first-boot-complete.target - First Boot Complete
     Loaded: loaded (/usr/lib/systemd/system/first-boot-complete.target; static)
     Active: inactive (dead)
  Condition: start condition failed at Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
       Docs: man:systemd.special(7)

Jan 30 16:48:35 localhost systemd[1]: First Boot Complete was skipped because of an unmet condition check (ConditionFirstBoot=yes).

○ getty-pre.target - Preparation for Logins
     Loaded: loaded (/usr/lib/systemd/system/getty-pre.target; static)
     Active: inactive (dead)
       Docs: man:systemd.special(7)
             man:systemd-getty-generator(8)
             http://0pointer.de/blog/projects/serial-console.html

● getty.target - Login Prompts
     Loaded: loaded (/usr/lib/systemd/system/getty.target; static)
     Active: active since Fri 2026-01-30 16:48:44 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:44 UTC; 3h 32min ago
       Docs: man:systemd.special(7)
             man:systemd-getty-generator(8)
             http://0pointer.de/blog/projects/serial-console.html

Jan 30 16:48:44 np0005602931.novalocal systemd[1]: Reached target Login Prompts.

○ graphical.target - Graphical Interface
     Loaded: loaded (/usr/lib/systemd/system/graphical.target; static)
     Active: inactive (dead)
       Docs: man:systemd.special(7)

○ initrd-fs.target - Initrd File Systems
     Loaded: loaded (/usr/lib/systemd/system/initrd-fs.target; static)
     Active: inactive (dead) since Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
       Docs: man:systemd.special(7)

Jan 30 16:48:33 localhost systemd[1]: Reached target Initrd File Systems.

○ initrd-root-device.target - Initrd Root Device
     Loaded: loaded (/usr/lib/systemd/system/initrd-root-device.target; static)
     Active: inactive (dead) since Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
       Docs: man:systemd.special(7)

Jan 30 16:48:32 localhost systemd[1]: Reached target Initrd Root Device.
Jan 30 16:48:34 localhost systemd[1]: Stopped target Initrd Root Device.

○ initrd-root-fs.target - Initrd Root File System
     Loaded: loaded (/usr/lib/systemd/system/initrd-root-fs.target; static)
     Active: inactive (dead) since Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
       Docs: man:systemd.special(7)

Jan 30 16:48:33 localhost systemd[1]: Reached target Initrd Root File System.

○ initrd-switch-root.target - Switch Root
     Loaded: loaded (/usr/lib/systemd/system/initrd-switch-root.target; static)
     Active: inactive (dead) since Fri 2026-01-30 16:48:34 UTC; 3h 32min ago

Jan 30 16:48:34 localhost systemd[1]: Reached target Switch Root.

○ initrd-usr-fs.target - Initrd /usr File System
     Loaded: loaded (/usr/lib/systemd/system/initrd-usr-fs.target; static)
     Active: inactive (dead) since Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
       Docs: man:systemd.special(7)

Jan 30 16:48:34 localhost systemd[1]: Stopped target Initrd /usr File System.
Notice: journal has been rotated since unit was started, output may be incomplete.

○ initrd.target - Initrd Default Target
     Loaded: loaded (/usr/lib/systemd/system/initrd.target; static)
     Active: inactive (dead) since Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
       Docs: man:systemd.special(7)

Jan 30 16:48:33 localhost systemd[1]: Reached target Initrd Default Target.
Jan 30 16:48:34 localhost systemd[1]: Stopped target Initrd Default Target.

● integritysetup.target - Local Integrity Protected Volumes
     Loaded: loaded (/usr/lib/systemd/system/integritysetup.target; static)
     Active: active since Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
       Docs: man:systemd.special(7)

● local-fs-pre.target - Preparation for Local File Systems
     Loaded: loaded (/usr/lib/systemd/system/local-fs-pre.target; static)
     Active: active since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
       Docs: man:systemd.special(7)

Jan 30 16:48:35 localhost systemd[1]: Reached target Preparation for Local File Systems.

● local-fs.target - Local File Systems
     Loaded: loaded (/usr/lib/systemd/system/local-fs.target; static)
     Active: active since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
       Docs: man:systemd.special(7)

Jan 30 16:48:35 localhost systemd[1]: Reached target Local File Systems.

● multi-user.target - Multi-User System
     Loaded: loaded (/usr/lib/systemd/system/multi-user.target; indirect; preset: disabled)
     Active: active since Fri 2026-01-30 16:48:44 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:44 UTC; 3h 32min ago
       Docs: man:systemd.special(7)

Jan 30 16:48:44 np0005602931.novalocal systemd[1]: Reached target Multi-User System.

● network-online.target - Network is Online
     Loaded: loaded (/usr/lib/systemd/system/network-online.target; static)
     Active: active since Fri 2026-01-30 16:48:43 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:43 UTC; 3h 32min ago
       Docs: man:systemd.special(7)
             https://systemd.io/NETWORK_ONLINE

Jan 30 16:48:43 np0005602931.novalocal systemd[1]: Reached target Network is Online.

● network-pre.target - Preparation for Network
     Loaded: loaded (/usr/lib/systemd/system/network-pre.target; static)
     Active: active since Fri 2026-01-30 16:48:37 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:37 UTC; 3h 32min ago
       Docs: man:systemd.special(7)
             https://systemd.io/NETWORK_ONLINE

Jan 30 16:48:37 np0005602931.novalocal systemd[1]: Reached target Preparation for Network.

● network.target - Network
     Loaded: loaded (/usr/lib/systemd/system/network.target; static)
     Active: active since Fri 2026-01-30 16:48:37 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:37 UTC; 3h 32min ago
       Docs: man:systemd.special(7)
             https://systemd.io/NETWORK_ONLINE

Jan 30 16:48:37 np0005602931.novalocal systemd[1]: Reached target Network.

● nfs-client.target - NFS client services
     Loaded: loaded (/usr/lib/systemd/system/nfs-client.target; enabled; preset: disabled)
     Active: active since Fri 2026-01-30 16:48:37 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:37 UTC; 3h 32min ago

Jan 30 16:48:37 np0005602931.novalocal systemd[1]: Reached target NFS client services.

○ nss-lookup.target - Host and Network Name Lookups
     Loaded: loaded (/usr/lib/systemd/system/nss-lookup.target; static)
     Active: inactive (dead)
       Docs: man:systemd.special(7)

● nss-user-lookup.target - User and Group Name Lookups
     Loaded: loaded (/usr/lib/systemd/system/nss-user-lookup.target; static)
     Active: active since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
       Docs: man:systemd.special(7)

Jan 30 16:48:36 localhost systemd[1]: Reached target User and Group Name Lookups.

● paths.target - Path Units
     Loaded: loaded (/usr/lib/systemd/system/paths.target; static)
     Active: active since Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
       Docs: man:systemd.special(7)

○ remote-cryptsetup.target - Remote Encrypted Volumes
     Loaded: loaded (/usr/lib/systemd/system/remote-cryptsetup.target; disabled; preset: enabled)
     Active: inactive (dead) since Fri 2026-01-30 16:48:34 UTC; 3h 32min ago
       Docs: man:systemd.special(7)

Jan 30 16:48:33 localhost systemd[1]: Reached target Remote Encrypted Volumes.
Jan 30 16:48:34 localhost systemd[1]: Stopped target Remote Encrypted Volumes.

● remote-fs-pre.target - Preparation for Remote File Systems
     Loaded: loaded (/usr/lib/systemd/system/remote-fs-pre.target; static)
     Active: active since Fri 2026-01-30 16:48:37 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:37 UTC; 3h 32min ago
       Docs: man:systemd.special(7)

Jan 30 16:48:37 np0005602931.novalocal systemd[1]: Reached target Preparation for Remote File Systems.

● remote-fs.target - Remote File Systems
     Loaded: loaded (/usr/lib/systemd/system/remote-fs.target; enabled; preset: enabled)
     Active: active since Fri 2026-01-30 16:48:37 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:37 UTC; 3h 32min ago
       Docs: man:systemd.special(7)

Jan 30 16:48:37 np0005602931.novalocal systemd[1]: Reached target Remote File Systems.

○ remote-veritysetup.target - Remote Verity Protected Volumes
     Loaded: loaded (/usr/lib/systemd/system/remote-veritysetup.target; disabled; preset: disabled)
     Active: inactive (dead)
       Docs: man:systemd.special(7)

○ rescue.target - Rescue Mode
     Loaded: loaded (/usr/lib/systemd/system/rescue.target; static)
     Active: inactive (dead)
       Docs: man:systemd.special(7)

● rpc_pipefs.target
     Loaded: loaded (/usr/lib/systemd/system/rpc_pipefs.target; static)
     Active: active since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:35 UTC; 3h 32min ago

● rpcbind.target - RPC Port Mapper
     Loaded: loaded (/usr/lib/systemd/system/rpcbind.target; static)
     Active: active since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
       Docs: man:systemd.special(7)

○ shutdown.target - System Shutdown
     Loaded: loaded (/usr/lib/systemd/system/shutdown.target; static)
Unit syslog.target could not be found.
     Active: inactive (dead)
       Docs: man:systemd.special(7)

● slices.target - Slice Units
     Loaded: loaded (/usr/lib/systemd/system/slices.target; static)
     Active: active since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
       Docs: man:systemd.special(7)

● sockets.target - Socket Units
     Loaded: loaded (/usr/lib/systemd/system/sockets.target; static)
     Active: active since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
       Docs: man:systemd.special(7)

Jan 30 16:48:36 localhost systemd[1]: Reached target Socket Units.

● sshd-keygen.target
     Loaded: loaded (/usr/lib/systemd/system/sshd-keygen.target; static)
     Active: active since Fri 2026-01-30 17:37:28 UTC; 2h 43min ago
      Until: Fri 2026-01-30 17:37:28 UTC; 2h 43min ago

Jan 30 17:37:28 compute-1 systemd[1]: Reached target sshd-keygen.target.

● swap.target - Swaps
     Loaded: loaded (/usr/lib/systemd/system/swap.target; static)
     Active: active since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
       Docs: man:systemd.special(7)

● sysinit.target - System Initialization
     Loaded: loaded (/usr/lib/systemd/system/sysinit.target; static)
     Active: active since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
       Docs: man:systemd.special(7)

Jan 30 16:48:36 localhost systemd[1]: Reached target System Initialization.

○ time-set.target - System Time Set
     Loaded: loaded (/usr/lib/systemd/system/time-set.target; static)
     Active: inactive (dead)
       Docs: man:systemd.special(7)

○ time-sync.target - System Time Synchronized
     Loaded: loaded (/usr/lib/systemd/system/time-sync.target; static)
     Active: inactive (dead)
       Docs: man:systemd.special(7)

● timers.target - Timer Units
     Loaded: loaded (/usr/lib/systemd/system/timers.target; static)
     Active: active since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
       Docs: man:systemd.special(7)

Jan 30 16:48:36 localhost systemd[1]: Reached target Timer Units.

○ umount.target - Unmount All Filesystems
     Loaded: loaded (/usr/lib/systemd/system/umount.target; static)
     Active: inactive (dead)
       Docs: man:systemd.special(7)

○ veritysetup-pre.target - Local Verity Protected Volumes (Pre)
     Loaded: loaded (/usr/lib/systemd/system/veritysetup-pre.target; static)
     Active: inactive (dead)
       Docs: man:systemd.special(7)

● veritysetup.target - Local Verity Protected Volumes
     Loaded: loaded (/usr/lib/systemd/system/veritysetup.target; static)
     Active: active since Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:35 UTC; 3h 32min ago
       Docs: man:systemd.special(7)

○ virt-guest-shutdown.target - Libvirt guests shutdown
     Loaded: loaded (/etc/systemd/system/virt-guest-shutdown.target; static)
     Active: inactive (dead)
       Docs: https://libvirt.org

● 407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a-41b4adf9ee4505ef.timer - /usr/bin/podman healthcheck run 407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a
     Loaded: loaded (/run/systemd/transient/407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a-41b4adf9ee4505ef.timer; transient)
  Transient: yes
     Active: active (waiting) since Fri 2026-01-30 17:43:36 UTC; 2h 37min ago
      Until: Fri 2026-01-30 17:43:36 UTC; 2h 37min ago
    Trigger: Fri 2026-01-30 20:21:26 UTC; 465ms left
   Triggers: ● 407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a-41b4adf9ee4505ef.service

Jan 30 17:43:36 compute-1 systemd[1]: Started /usr/bin/podman healthcheck run 407a4f30d99aa614fa16a01068678a24941c99545fcaf600c35e8f3118ec0b7a.

● 4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd-592662485594b6f2.timer - /usr/bin/podman healthcheck run 4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd
     Loaded: loaded (/run/systemd/transient/4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd-592662485594b6f2.timer; transient)
  Transient: yes
     Active: active (waiting) since Fri 2026-01-30 17:43:22 UTC; 2h 38min ago
      Until: Fri 2026-01-30 17:43:22 UTC; 2h 38min ago
    Trigger: Fri 2026-01-30 20:21:45 UTC; 19s left
   Triggers: ● 4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd-592662485594b6f2.service

Jan 30 17:43:22 compute-1 systemd[1]: Started /usr/bin/podman healthcheck run 4d13a8233c89e274dd79ce9661273f1419b5813298c88347f5485a6a1372bcbd.

● 834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20-309012fbd99c6d44.timer - /usr/bin/podman healthcheck run 834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20
     Loaded: loaded (/run/systemd/transient/834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20-309012fbd99c6d44.timer; transient)
  Transient: yes
     Active: active (waiting) since Fri 2026-01-30 17:35:03 UTC; 2h 46min ago
      Until: Fri 2026-01-30 17:35:03 UTC; 2h 46min ago
    Trigger: Fri 2026-01-30 20:21:26 UTC; 441ms left
   Triggers: ● 834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20-309012fbd99c6d44.service

Jan 30 17:35:03 compute-1 systemd[1]: Started /usr/bin/podman healthcheck run 834296de81f6daec7a4c219ce0ebb340008185429cd08d2924973cfd6cb7bf20.

● 883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037-70e2637eddb374c4.timer - /usr/bin/podman healthcheck run 883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037
     Loaded: loaded (/run/systemd/transient/883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037-70e2637eddb374c4.timer; transient)
  Transient: yes
     Active: active (waiting) since Fri 2026-01-30 19:32:55 UTC; 48min ago
      Until: Fri 2026-01-30 19:32:55 UTC; 48min ago
    Trigger: Fri 2026-01-30 20:21:26 UTC; 487ms left
   Triggers: ● 883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037-70e2637eddb374c4.service

Jan 30 19:32:55 compute-1 systemd[1]: Started /usr/bin/podman healthcheck run 883fb7cfe2203bf00a05ba06e32ac6d75b1eea91f3eb993ebd6337f89bc2b037.

● bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98-7fd10ef3aba79581.timer - /usr/bin/podman healthcheck run bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98
     Loaded: loaded (/run/systemd/transient/bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98-7fd10ef3aba79581.timer; transient)
  Transient: yes
     Active: active (waiting) since Fri 2026-01-30 17:43:54 UTC; 2h 37min ago
      Until: Fri 2026-01-30 17:43:54 UTC; 2h 37min ago
    Trigger: Fri 2026-01-30 20:21:45 UTC; 19s left
   Triggers: ● bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98-7fd10ef3aba79581.service

Jan 30 17:43:54 compute-1 systemd[1]: Started /usr/bin/podman healthcheck run bc1fd8973be3bc9d4936aa84a6c4aca49bb3f2d3d3c99b66c06c7a9250bacf98.

● dnf-makecache.timer - dnf makecache --timer
     Loaded: loaded (/usr/lib/systemd/system/dnf-makecache.timer; enabled; preset: enabled)
     Active: active (waiting) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
    Trigger: Fri 2026-01-30 21:46:32 UTC; 1h 25min left
   Triggers: ● dnf-makecache.service

Jan 30 16:48:36 localhost systemd[1]: Started dnf makecache --timer.

● e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a-7054953e52e79992.timer - /usr/bin/podman healthcheck run e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a
     Loaded: loaded (/run/systemd/transient/e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a-7054953e52e79992.timer; transient)
  Transient: yes
     Active: active (waiting) since Fri 2026-01-30 17:44:14 UTC; 2h 37min ago
      Until: Fri 2026-01-30 17:44:14 UTC; 2h 37min ago
    Trigger: Fri 2026-01-30 20:21:35 UTC; 9s left
   Triggers: ● e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a-7054953e52e79992.service

Jan 30 17:44:14 compute-1 systemd[1]: Started /usr/bin/podman healthcheck run e8e72332e5ed9ce67296b54f9e8812b8eb9cfe0b4b734156f2bb977fc1676e9a.

● logrotate.timer - Daily rotation of log files
     Loaded: loaded (/usr/lib/systemd/system/logrotate.timer; enabled; preset: enabled)
     Active: active (waiting) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
    Trigger: Sat 2026-01-31 00:00:00 UTC; 3h 38min left
   Triggers: ● logrotate.service
       Docs: man:logrotate(8)
             man:logrotate.conf(5)

Jan 30 16:48:36 localhost systemd[1]: Started Daily rotation of log files.

○ raid-check.timer - Weekly RAID setup health check
     Loaded: loaded (/usr/lib/systemd/system/raid-check.timer; enabled; preset: enabled)
     Active: inactive (dead)
    Trigger: n/a
   Triggers: ● raid-check.service

○ sysstat-collect.timer - Run system activity accounting tool every 10 minutes
     Loaded: loaded (/usr/lib/systemd/system/sysstat-collect.timer; enabled; preset: disabled)
     Active: inactive (dead)
    Trigger: n/a
   Triggers: ● sysstat-collect.service

○ sysstat-summary.timer - Generate summary of yesterday's process accounting
     Loaded: loaded (/usr/lib/systemd/system/sysstat-summary.timer; enabled; preset: disabled)
     Active: inactive (dead)
    Trigger: n/a
   Triggers: ● sysstat-summary.service

● systemd-tmpfiles-clean.timer - Daily Cleanup of Temporary Directories
     Loaded: loaded (/usr/lib/systemd/system/systemd-tmpfiles-clean.timer; static)
     Active: active (waiting) since Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
      Until: Fri 2026-01-30 16:48:36 UTC; 3h 32min ago
    Trigger: Sat 2026-01-31 17:03:38 UTC; 20h left
   Triggers: ● systemd-tmpfiles-clean.service
       Docs: man:tmpfiles.d(5)
             man:systemd-tmpfiles(8)

Jan 30 16:48:36 localhost systemd[1]: Started Daily Cleanup of Temporary Directories.

● unbound-anchor.timer - daily update of the root trust anchor for DNSSEC
     Loaded: loaded (/usr/lib/systemd/system/unbound-anchor.timer; enabled; preset: enabled)
     Active: active (waiting) since Fri 2026-01-30 17:29:02 UTC; 2h 52min ago
      Until: Fri 2026-01-30 17:29:02 UTC; 2h 52min ago
    Trigger: Sat 2026-01-31 00:00:00 UTC; 3h 38min left
   Triggers: ● unbound-anchor.service
       Docs: man:unbound-anchor(8)

Jan 30 17:29:02 compute-1 systemd[1]: Started daily update of the root trust anchor for DNSSEC.
